VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h @ 65493

Last change on this file since 65493 was 65493, checked in by vboxsync, 8 years ago:

CPUM,PGM: cmpxchg16b work (stats).

/* $Id: IEMAllInstructions.cpp.h 65493 2017-01-27 23:24:29Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 */

/*
 * Copyright (C) 2011-2016 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */

#ifdef _MSC_VER
# pragma warning(push)
# pragma warning(disable: 4702) /* Unreachable code like return in iemOp_Grp6_lldt. */
#endif


/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm denotes a register, there are no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        if (!pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
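
/*
 * Background for the workers in this file: the ModR/M byte packs three
 * fields, mod (bits 7:6), reg (bits 5:3) and rm (bits 2:0); mod == 3 selects
 * the register form, anything else a memory operand.  A minimal sketch of
 * the field extraction, written with plain shifts instead of the X86_MODRM_*
 * macros used above:
 *
 *     uint8_t const iReg = ((bRm >> 3) & 7) | pVCpu->iem.s.uRexReg; // REX.R extends reg to r8-r15
 *     uint8_t const iRm  =  (bRm       & 7) | pVCpu->iem.s.uRexB;   // REX.B extends rm the same way
 */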


/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm denotes a register, there are no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
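
/*
 * Note on the memory forms above: EFLAGS is fetched into a stack local and
 * only committed after IEM_MC_MEM_COMMIT_AND_UNMAP has succeeded, so a fault
 * while writing the destination back leaves the guest flags untouched.  The
 * pfnLocked* null check doubles as a CMP/TEST detector: those instructions
 * never write their destination, have no locked variants, and therefore map
 * the operand read-only.
 */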


/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm denotes a register, there are no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm denotes a register, there are no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
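
/*
 * The IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF calls in the 32-bit cases implement
 * the x86-64 rule that writing a 32-bit destination zero-extends into the
 * full 64-bit register, e.g. 'add eax, ebx' clears RAX[63:32].  TEST is
 * exempted where checked because it never writes its destination operand.
 */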


/**
 * Common worker for instructions like ADD, AND, OR, ++ working on AL with
 * a byte immediate.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *, pu8Dst, 0);
    IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1);
    IEM_MC_ARG(uint32_t *, pEFlags, 2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/**
 * Common worker for instructions like ADD, AND, OR, ++ working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
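
/*
 * Note the 64-bit case above: these instructions have no 64-bit immediate
 * form, so IEM_OPCODE_GET_NEXT_S32_SX_U64 fetches a 32-bit immediate and
 * sign-extends it, matching e.g. 'add rax, -1' which encodes its immediate
 * as 0xffffffff.
 */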


/** Opcodes 0xf1, 0xd6. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC(Invalid, "Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Invalid with RM byte. */
FNIEMOPRM_DEF(iemOp_InvalidWithRM)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(InvalidWithRm, "InvalidWithRM");
    return IEMOP_RAISE_INVALID_OPCODE();
}



/** @name ..... opcodes.
 *
 * @{
 */

/** @} */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
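
/*
 * Note that the IEM_MC_CALL_CIMPL_* blocks above and below end without an
 * IEM_MC_ADVANCE_RIP: advancing RIP is left to the C implementation
 * (iemCImpl_*), which only does so once it knows the instruction did not
 * raise an exception.
 */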


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for VERR and VERW (opcode 0x0f 0x00 /4 and /5). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
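
/*
 * Group 6 is one of the opcode groups where the ModR/M reg field selects the
 * instruction rather than naming an operand register, hence the eight-entry
 * dispatch table indexed by (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK.
 */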


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}
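
/*
 * The target-CPU checks in smsw above presumably reproduce historic
 * behaviour for the undefined high bits of the machine status word: a 386
 * reports ones above the ET bit (OR with 0xffe0), while a 286, which only
 * implements the low four MSW bits, sets bits 4-15 as well (OR with 0xfff0).
 */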


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored; everything is 16-bit and only
       the lower four bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pVCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
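
/*
 * Group 7 overloads its register forms (mod == 3): the rm field then selects
 * special encodings such as VMCALL/VMLAUNCH, MONITOR/MWAIT, XGETBV/XSETBV
 * and SWAPGS/RDTSCP, while the memory forms decode as the Ms-operand
 * instructions SGDT, SIDT, LGDT and LIDT.
 */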

/** Common worker for LAR and LSL (opcodes 0x0f 0x02 and 0x0f 0x03). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
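
/*
 * The 32-bit and 64-bit cases above intentionally share one path: both
 * reference the destination as a 64-bit register and leave the width
 * handling to iemCImpl_LarLsl_u64, while the source selector fetch is a
 * 16-bit read in every case.
 */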



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}


/** Opcode 0x0f 0x10. */
FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);


/** Opcode 0x0f 0x11. */
FNIEMOP_DEF(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd)
{
    /* Quick hack. Need to restructure all of this later some time. */
    uint32_t const fRelevantPrefix = pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ);
    if (fRelevantPrefix == 0)
    {
        IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /*
             * Register, register.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_BEGIN(0, 0);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
            IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                                  ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /*
             * Memory, register.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else if (fRelevantPrefix == IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /*
             * Register, register.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, uSrc);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
            IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /*
             * Memory, register.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        IEMOP_BITCH_ABOUT_STUB();
        return VERR_IEM_INSTR_NOT_IMPLEMENTED;
    }
    return VINF_SUCCESS;
}
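
/*
 * For most two-byte SSE opcodes the mandatory prefix (none, 0x66, 0xf3 or
 * 0xf2) selects between the packed-single, packed-double, scalar-single and
 * scalar-double forms; the fRelevantPrefix switching above implements that
 * for the movups/movsd store until the decoder is restructured.
 */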


/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq); //NEXT


/** Opcode 0x0f 0x13. */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq)
{
    /* Quick hack. Need to restructure all of this later some time. */
    if (pVCpu->iem.s.fPrefixes == IEM_OP_PRF_SIZE_OP)
    {
        IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
#if 0
            /*
             * Register, register.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
            IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
#else
            return IEMOP_RAISE_INVALID_OPCODE();
#endif
        }
        else
        {
            /*
             * Memory, register.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        return VINF_SUCCESS;
    }

    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/** Opcode 0x0f 0x14. */
FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
/** Opcode 0x0f 0x15. */
FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
/** Opcode 0x0f 0x16. */
FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x17. */
FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq); //NEXT


/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}


/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}


/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
1810    /* mod is ignored, as are operand size overrides. */
1811 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
1812 IEMOP_HLP_MIN_386();
1813 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1814 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1815 else
1816 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1817
1818 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1819 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1820 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1821 {
1822 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1823 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1824 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1825 iCrReg |= 8;
1826 }
1827 switch (iCrReg)
1828 {
1829 case 0: case 2: case 3: case 4: case 8:
1830 break;
1831 default:
1832 return IEMOP_RAISE_INVALID_OPCODE();
1833 }
1834 IEMOP_HLP_DONE_DECODING();
1835
1836 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1837}
1838
1839
1840/** Opcode 0x0f 0x23. */
1841FNIEMOP_DEF(iemOp_mov_Dd_Rd)
1842{
1843 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
1844 IEMOP_HLP_MIN_386();
1845 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1847 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1848 return IEMOP_RAISE_INVALID_OPCODE();
1849 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
1850 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
1851 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1852}
1853
1854
1855/** Opcode 0x0f 0x24. */
1856FNIEMOP_DEF(iemOp_mov_Rd_Td)
1857{
1858 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
1859 /** @todo works on 386 and 486. */
1860 /* The RM byte is not considered, see testcase. */
1861 return IEMOP_RAISE_INVALID_OPCODE();
1862}
1863
1864
1865/** Opcode 0x0f 0x26. */
1866FNIEMOP_DEF(iemOp_mov_Td_Rd)
1867{
1868 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
1869 /** @todo works on 386 and 486. */
1870 /* The RM byte is not considered, see testcase. */
1871 return IEMOP_RAISE_INVALID_OPCODE();
1872}
1873
1874
1875/** Opcode 0x0f 0x28. */
1876FNIEMOP_DEF(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd)
1877{
1878 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1879 IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
1880 else
1881 IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
1882 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1883 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1884 {
1885 /*
1886 * Register, register.
1887 */
1888 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1889 IEM_MC_BEGIN(0, 0);
1890 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1891 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1892 else
1893 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1894 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1895 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1896 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1897 IEM_MC_ADVANCE_RIP();
1898 IEM_MC_END();
1899 }
1900 else
1901 {
1902 /*
1903 * Register, memory.
1904 */
1905 IEM_MC_BEGIN(0, 2);
1906 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1907 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1908
1909 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1910 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
1911 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1912 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1913 else
1914 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1915 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1916
1917 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1918 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1919
1920 IEM_MC_ADVANCE_RIP();
1921 IEM_MC_END();
1922 }
1923 return VINF_SUCCESS;
1924}
1925
1926
1927/** Opcode 0x0f 0x29. */
1928FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
1929{
1930 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1931 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
1932 else
1933 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
1934 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1935 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1936 {
1937 /*
1938 * Register, register.
1939 */
1940 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1941 IEM_MC_BEGIN(0, 0);
1942 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1943 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1944 else
1945 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1946 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1947 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1948 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1949 IEM_MC_ADVANCE_RIP();
1950 IEM_MC_END();
1951 }
1952 else
1953 {
1954 /*
1955 * Memory, register.
1956 */
1957 IEM_MC_BEGIN(0, 2);
1958 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1959 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1960
1961 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1962 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
1963 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1964 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1965 else
1966 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1967 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1968
1969 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1970 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1971
1972 IEM_MC_ADVANCE_RIP();
1973 IEM_MC_END();
1974 }
1975 return VINF_SUCCESS;
1976}
1977
1978
1979/** Opcode 0x0f 0x2a. */
1980FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey); //NEXT
1981
1982
1983/** Opcode 0x0f 0x2b. */
1984FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
1985{
1986 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1987 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
1988 else
1989        IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mpd,Vpd");
1990 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1991 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1992 {
1993 /*
1994         * Memory, register.
1995 */
1996 IEM_MC_BEGIN(0, 2);
1997 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1998 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1999
2000 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2001 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
2002 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2003 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2004 else
2005 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2006 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2007
2008 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2009 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2010
2011 IEM_MC_ADVANCE_RIP();
2012 IEM_MC_END();
2013 }
2014 /* The register, register encoding is invalid. */
2015 else
2016 return IEMOP_RAISE_INVALID_OPCODE();
2017 return VINF_SUCCESS;
2018}
2019
2020
2021/** Opcode 0x0f 0x2c. */
2022FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd); //NEXT
2023/** Opcode 0x0f 0x2d. */
2024FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
2025/** Opcode 0x0f 0x2e. */
2026FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd); //NEXT
2027/** Opcode 0x0f 0x2f. */
2028FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
2029
2030
2031/** Opcode 0x0f 0x30. */
2032FNIEMOP_DEF(iemOp_wrmsr)
2033{
2034 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2035 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2036 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2037}
2038
2039
2040/** Opcode 0x0f 0x31. */
2041FNIEMOP_DEF(iemOp_rdtsc)
2042{
2043 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2045 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2046}
2047
2048
2049/** Opcode 0x0f 0x32. */
2050FNIEMOP_DEF(iemOp_rdmsr)
2051{
2052 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2054 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2055}
2056
2057
2058/** Opcode 0x0f 0x33. */
2059FNIEMOP_STUB(iemOp_rdpmc);
2060/** Opcode 0x0f 0x34. */
2061FNIEMOP_STUB(iemOp_sysenter);
2062/** Opcode 0x0f 0x35. */
2063FNIEMOP_STUB(iemOp_sysexit);
2064/** Opcode 0x0f 0x37. */
2065FNIEMOP_STUB(iemOp_getsec);
2066/** Opcode 0x0f 0x38. */
2067FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
2068/** Opcode 0x0f 0x3a. */
2069FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
2070
2071
2072/**
2073 * Implements a conditional move.
2074 *
2075 * Wish there were an obvious way to do this that would let us share code
2076 * and reduce the bloat.
2077 *
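 * Note that a 32-bit operand is zero-extended into the full 64-bit
 * destination even when the condition is false, which is why the 32-bit
 * cases clear the high half of the register in their else branches.
 *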
2078 * @param a_Cnd The conditional "microcode" operation.
2079 */
2080#define CMOV_X(a_Cnd) \
2081 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2082 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2083 { \
2084 switch (pVCpu->iem.s.enmEffOpSize) \
2085 { \
2086 case IEMMODE_16BIT: \
2087 IEM_MC_BEGIN(0, 1); \
2088 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2089 a_Cnd { \
2090 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2091 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2092 } IEM_MC_ENDIF(); \
2093 IEM_MC_ADVANCE_RIP(); \
2094 IEM_MC_END(); \
2095 return VINF_SUCCESS; \
2096 \
2097 case IEMMODE_32BIT: \
2098 IEM_MC_BEGIN(0, 1); \
2099 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2100 a_Cnd { \
2101 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2102 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2103 } IEM_MC_ELSE() { \
2104 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2105 } IEM_MC_ENDIF(); \
2106 IEM_MC_ADVANCE_RIP(); \
2107 IEM_MC_END(); \
2108 return VINF_SUCCESS; \
2109 \
2110 case IEMMODE_64BIT: \
2111 IEM_MC_BEGIN(0, 1); \
2112 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2113 a_Cnd { \
2114 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2115 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2116 } IEM_MC_ENDIF(); \
2117 IEM_MC_ADVANCE_RIP(); \
2118 IEM_MC_END(); \
2119 return VINF_SUCCESS; \
2120 \
2121 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2122 } \
2123 } \
2124 else \
2125 { \
2126 switch (pVCpu->iem.s.enmEffOpSize) \
2127 { \
2128 case IEMMODE_16BIT: \
2129 IEM_MC_BEGIN(0, 2); \
2130 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2131 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2133 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2134 a_Cnd { \
2135 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2136 } IEM_MC_ENDIF(); \
2137 IEM_MC_ADVANCE_RIP(); \
2138 IEM_MC_END(); \
2139 return VINF_SUCCESS; \
2140 \
2141 case IEMMODE_32BIT: \
2142 IEM_MC_BEGIN(0, 2); \
2143 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2144 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2145 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2146 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2147 a_Cnd { \
2148 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2149 } IEM_MC_ELSE() { \
2150 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2151 } IEM_MC_ENDIF(); \
2152 IEM_MC_ADVANCE_RIP(); \
2153 IEM_MC_END(); \
2154 return VINF_SUCCESS; \
2155 \
2156 case IEMMODE_64BIT: \
2157 IEM_MC_BEGIN(0, 2); \
2158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2159 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2161 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2162 a_Cnd { \
2163 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2164 } IEM_MC_ENDIF(); \
2165 IEM_MC_ADVANCE_RIP(); \
2166 IEM_MC_END(); \
2167 return VINF_SUCCESS; \
2168 \
2169 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2170 } \
2171 } do {} while (0)
2172
2173
2174
2175/** Opcode 0x0f 0x40. */
2176FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2177{
2178 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2179 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2180}
2181
2182
2183/** Opcode 0x0f 0x41. */
2184FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2185{
2186 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2187 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2188}
2189
2190
2191/** Opcode 0x0f 0x42. */
2192FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2193{
2194 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2195 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2196}
2197
2198
2199/** Opcode 0x0f 0x43. */
2200FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2201{
2202 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2203 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2204}
2205
2206
2207/** Opcode 0x0f 0x44. */
2208FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2209{
2210 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2211 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2212}
2213
2214
2215/** Opcode 0x0f 0x45. */
2216FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2217{
2218 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2219 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2220}
2221
2222
2223/** Opcode 0x0f 0x46. */
2224FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2225{
2226 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2227 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2228}
2229
2230
2231/** Opcode 0x0f 0x47. */
2232FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2233{
2234 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2235 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2236}
2237
2238
2239/** Opcode 0x0f 0x48. */
2240FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2241{
2242 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2243 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2244}
2245
2246
2247/** Opcode 0x0f 0x49. */
2248FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2249{
2250 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2251 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2252}
2253
2254
2255/** Opcode 0x0f 0x4a. */
2256FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2257{
2258 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2259 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2260}
2261
2262
2263/** Opcode 0x0f 0x4b. */
2264FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2265{
2266 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2267 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2268}
2269
2270
2271/** Opcode 0x0f 0x4c. */
2272FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2273{
2274 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2275 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2276}
2277
2278
2279/** Opcode 0x0f 0x4d. */
2280FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2281{
2282 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2283 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2284}
2285
2286
2287/** Opcode 0x0f 0x4e. */
2288FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2289{
2290 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2291 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2292}
2293
2294
2295/** Opcode 0x0f 0x4f. */
2296FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2297{
2298 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2299 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2300}
2301
2302#undef CMOV_X
2303
2304/** Opcode 0x0f 0x50. */
2305FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
2306/** Opcode 0x0f 0x51. */
2307FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
2308/** Opcode 0x0f 0x52. */
2309FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
2310/** Opcode 0x0f 0x53. */
2311FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
2312/** Opcode 0x0f 0x54. */
2313FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
2314/** Opcode 0x0f 0x55. */
2315FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
2316/** Opcode 0x0f 0x56. */
2317FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
2318/** Opcode 0x0f 0x57. */
2319FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
2320/** Opcode 0x0f 0x58. */
2321FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd); //NEXT
2322/** Opcode 0x0f 0x59. */
2323FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);//NEXT
2324/** Opcode 0x0f 0x5a. */
2325FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
2326/** Opcode 0x0f 0x5b. */
2327FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
2328/** Opcode 0x0f 0x5c. */
2329FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
2330/** Opcode 0x0f 0x5d. */
2331FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
2332/** Opcode 0x0f 0x5e. */
2333FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
2334/** Opcode 0x0f 0x5f. */
2335FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
2336
2337
2338/**
2339 * Common worker for SSE2 and MMX instructions on the forms:
2340 * pxxxx xmm1, xmm2/mem128
2341 * pxxxx mm1, mm2/mem32
2342 *
2343 * The 2nd operand is the first half of a register, which in the memory case
2344 * means a 32-bit memory access for MMX, and a 128-bit aligned 64-bit or
2345 * 128-bit memory access for SSE.
2346 *
2347 * Exceptions type 4.
2348 */
2349FNIEMOP_DEF_1(iemOpCommonMmxSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2350{
2351 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
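    /* The mandatory prefix picks the form: 66h selects the 128-bit SSE
       variant, no prefix the 64-bit MMX variant; F2h/F3h are invalid. */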
2352 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2353 {
2354 case IEM_OP_PRF_SIZE_OP: /* SSE */
2355 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2356 {
2357 /*
2358 * Register, register.
2359 */
2360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2361 IEM_MC_BEGIN(2, 0);
2362 IEM_MC_ARG(uint128_t *, pDst, 0);
2363 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2364 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2365 IEM_MC_PREPARE_SSE_USAGE();
2366 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2367 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2368 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2369 IEM_MC_ADVANCE_RIP();
2370 IEM_MC_END();
2371 }
2372 else
2373 {
2374 /*
2375 * Register, memory.
2376 */
2377 IEM_MC_BEGIN(2, 2);
2378 IEM_MC_ARG(uint128_t *, pDst, 0);
2379 IEM_MC_LOCAL(uint64_t, uSrc);
2380 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2381 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2382
2383 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2385 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2386 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2387
2388 IEM_MC_PREPARE_SSE_USAGE();
2389 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2390 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2391
2392 IEM_MC_ADVANCE_RIP();
2393 IEM_MC_END();
2394 }
2395 return VINF_SUCCESS;
2396
2397 case 0: /* MMX */
2398 if (!pImpl->pfnU64)
2399 return IEMOP_RAISE_INVALID_OPCODE();
2400 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2401 {
2402 /*
2403 * Register, register.
2404 */
2405 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2406 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2408 IEM_MC_BEGIN(2, 0);
2409 IEM_MC_ARG(uint64_t *, pDst, 0);
2410 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2411 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2412 IEM_MC_PREPARE_FPU_USAGE();
2413 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2414 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2415 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2416 IEM_MC_ADVANCE_RIP();
2417 IEM_MC_END();
2418 }
2419 else
2420 {
2421 /*
2422 * Register, memory.
2423 */
2424 IEM_MC_BEGIN(2, 2);
2425 IEM_MC_ARG(uint64_t *, pDst, 0);
2426 IEM_MC_LOCAL(uint32_t, uSrc);
2427 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2428 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2429
2430 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2432 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2433 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2434
2435 IEM_MC_PREPARE_FPU_USAGE();
2436 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2437 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2438
2439 IEM_MC_ADVANCE_RIP();
2440 IEM_MC_END();
2441 }
2442 return VINF_SUCCESS;
2443
2444 default:
2445 return IEMOP_RAISE_INVALID_OPCODE();
2446 }
2447}
2448
2449
2450/** Opcode 0x0f 0x60. */
2451FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq)
2452{
2453 IEMOP_MNEMONIC(punpcklbw, "punpcklbw");
2454 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2455}
2456
2457
2458/** Opcode 0x0f 0x61. */
2459FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq)
2460{
2461 IEMOP_MNEMONIC(punpcklwd, "punpcklwd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
2462 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2463}
2464
2465
2466/** Opcode 0x0f 0x62. */
2467FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq)
2468{
2469 IEMOP_MNEMONIC(punpckldq, "punpckldq");
2470 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2471}
2472
2473
2474/** Opcode 0x0f 0x63. */
2475FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
2476/** Opcode 0x0f 0x64. */
2477FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
2478/** Opcode 0x0f 0x65. */
2479FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
2480/** Opcode 0x0f 0x66. */
2481FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
2482/** Opcode 0x0f 0x67. */
2483FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
2484
2485
2486/**
2487 * Common worker for SSE2 and MMX instructions on the forms:
2488 * pxxxx xmm1, xmm2/mem128
2489 * pxxxx mm1, mm2/mem64
2490 *
2491 * The 2nd operand is the second half of a register, which in the memory case
2492 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2493 * where it may read the full 128 bits or only the upper 64 bits.
2494 *
2495 * Exceptions type 4.
2496 */
2497FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2498{
2499 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
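    /* 66h selects the 128-bit SSE form, no prefix the 64-bit MMX form. */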
2500 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2501 {
2502 case IEM_OP_PRF_SIZE_OP: /* SSE */
2503 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2504 {
2505 /*
2506 * Register, register.
2507 */
2508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2509 IEM_MC_BEGIN(2, 0);
2510 IEM_MC_ARG(uint128_t *, pDst, 0);
2511 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2512 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2513 IEM_MC_PREPARE_SSE_USAGE();
2514 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2515 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2516 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2517 IEM_MC_ADVANCE_RIP();
2518 IEM_MC_END();
2519 }
2520 else
2521 {
2522 /*
2523 * Register, memory.
2524 */
2525 IEM_MC_BEGIN(2, 2);
2526 IEM_MC_ARG(uint128_t *, pDst, 0);
2527 IEM_MC_LOCAL(uint128_t, uSrc);
2528 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2529 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2530
2531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2533 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2534            IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2535
2536 IEM_MC_PREPARE_SSE_USAGE();
2537 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2538 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2539
2540 IEM_MC_ADVANCE_RIP();
2541 IEM_MC_END();
2542 }
2543 return VINF_SUCCESS;
2544
2545 case 0: /* MMX */
2546 if (!pImpl->pfnU64)
2547 return IEMOP_RAISE_INVALID_OPCODE();
2548 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2549 {
2550 /*
2551 * Register, register.
2552 */
2553 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2554 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2556 IEM_MC_BEGIN(2, 0);
2557 IEM_MC_ARG(uint64_t *, pDst, 0);
2558 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2559 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2560 IEM_MC_PREPARE_FPU_USAGE();
2561 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2562 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2563 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2564 IEM_MC_ADVANCE_RIP();
2565 IEM_MC_END();
2566 }
2567 else
2568 {
2569 /*
2570 * Register, memory.
2571 */
2572 IEM_MC_BEGIN(2, 2);
2573 IEM_MC_ARG(uint64_t *, pDst, 0);
2574 IEM_MC_LOCAL(uint64_t, uSrc);
2575 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2576 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2577
2578 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2580 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2581 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2582
2583 IEM_MC_PREPARE_FPU_USAGE();
2584 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2585 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2586
2587 IEM_MC_ADVANCE_RIP();
2588 IEM_MC_END();
2589 }
2590 return VINF_SUCCESS;
2591
2592 default:
2593 return IEMOP_RAISE_INVALID_OPCODE();
2594 }
2595}
2596
2597
2598/** Opcode 0x0f 0x68. */
2599FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
2600{
2601 IEMOP_MNEMONIC(punpckhbw, "punpckhbw");
2602 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2603}
2604
2605
2606/** Opcode 0x0f 0x69. */
2607FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
2608{
2609 IEMOP_MNEMONIC(punpckhwd, "punpckhwd");
2610 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2611}
2612
2613
2614/** Opcode 0x0f 0x6a. */
2615FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
2616{
2617 IEMOP_MNEMONIC(punpckhdq, "punpckhdq");
2618 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2619}
2620
2621/** Opcode 0x0f 0x6b. */
2622FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);
2623
2624
2625/** Opcode 0x0f 0x6c. */
2626FNIEMOP_DEF(iemOp_punpcklqdq_Vdq_Wdq)
2627{
2628 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq");
2629 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2630}
2631
2632
2633/** Opcode 0x0f 0x6d. */
2634FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
2635{
2636 IEMOP_MNEMONIC(punpckhqdq, "punpckhqdq");
2637 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
2638}
2639
2640
2641/** Opcode 0x0f 0x6e. */
2642FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
2643{
2644 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
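    /* REX.W selects the 64-bit movq form; without it this is the 32-bit movd. */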
2645 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2646 {
2647 case IEM_OP_PRF_SIZE_OP: /* SSE */
2648 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2649 IEMOP_MNEMONIC(movdq_Wq_Eq, "movq Wq,Eq");
2650 else
2651 IEMOP_MNEMONIC(movdq_Wd_Ed, "movd Wd,Ed");
2652 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2653 {
2654 /* XMM, greg*/
2655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2656 IEM_MC_BEGIN(0, 1);
2657 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2658 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2659 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2660 {
2661 IEM_MC_LOCAL(uint64_t, u64Tmp);
2662 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2663 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2664 }
2665 else
2666 {
2667 IEM_MC_LOCAL(uint32_t, u32Tmp);
2668 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2669 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2670 }
2671 IEM_MC_ADVANCE_RIP();
2672 IEM_MC_END();
2673 }
2674 else
2675 {
2676 /* XMM, [mem] */
2677 IEM_MC_BEGIN(0, 2);
2678 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2679 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
2680                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2682 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2683 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2684 {
2685 IEM_MC_LOCAL(uint64_t, u64Tmp);
2686 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2687 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2688 }
2689 else
2690 {
2691 IEM_MC_LOCAL(uint32_t, u32Tmp);
2692 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2693 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2694 }
2695 IEM_MC_ADVANCE_RIP();
2696 IEM_MC_END();
2697 }
2698 return VINF_SUCCESS;
2699
2700 case 0: /* MMX */
2701 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2702 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
2703 else
2704 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
2705 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2706 {
2707 /* MMX, greg */
2708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2709 IEM_MC_BEGIN(0, 1);
2710 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2711 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2712 IEM_MC_LOCAL(uint64_t, u64Tmp);
2713 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2714 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2715 else
2716 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2717 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2718 IEM_MC_ADVANCE_RIP();
2719 IEM_MC_END();
2720 }
2721 else
2722 {
2723 /* MMX, [mem] */
2724 IEM_MC_BEGIN(0, 2);
2725 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2726 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2727                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2729 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2730 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2731 {
2732 IEM_MC_LOCAL(uint64_t, u64Tmp);
2733 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2734 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2735 }
2736 else
2737 {
2738 IEM_MC_LOCAL(uint32_t, u32Tmp);
2739 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2740 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
2741 }
2742 IEM_MC_ADVANCE_RIP();
2743 IEM_MC_END();
2744 }
2745 return VINF_SUCCESS;
2746
2747 default:
2748 return IEMOP_RAISE_INVALID_OPCODE();
2749 }
2750}
2751
2752
2753/** Opcode 0x0f 0x6f. */
2754FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
2755{
2756 bool fAligned = false;
2757 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2758 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2759 {
2760 case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
2761 fAligned = true;
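            /* fall thru */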
2762 case IEM_OP_PRF_REPZ: /* SSE unaligned */
2763 if (fAligned)
2764 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
2765 else
2766 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
2767 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2768 {
2769 /*
2770 * Register, register.
2771 */
2772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2773 IEM_MC_BEGIN(0, 0);
2774 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2775 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2776 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2777 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2778 IEM_MC_ADVANCE_RIP();
2779 IEM_MC_END();
2780 }
2781 else
2782 {
2783 /*
2784 * Register, memory.
2785 */
2786 IEM_MC_BEGIN(0, 2);
2787 IEM_MC_LOCAL(uint128_t, u128Tmp);
2788 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2789
2790 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2791 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2792 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2793 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2794 if (fAligned)
2795 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2796 else
2797 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2798 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2799
2800 IEM_MC_ADVANCE_RIP();
2801 IEM_MC_END();
2802 }
2803 return VINF_SUCCESS;
2804
2805 case 0: /* MMX */
2806 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
2807 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2808 {
2809 /*
2810 * Register, register.
2811 */
2812 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2813 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2815 IEM_MC_BEGIN(0, 1);
2816 IEM_MC_LOCAL(uint64_t, u64Tmp);
2817 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2818 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2819 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
2820 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2821 IEM_MC_ADVANCE_RIP();
2822 IEM_MC_END();
2823 }
2824 else
2825 {
2826 /*
2827 * Register, memory.
2828 */
2829 IEM_MC_BEGIN(0, 2);
2830 IEM_MC_LOCAL(uint64_t, u64Tmp);
2831 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2832
2833 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2835 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2836 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2837 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2838 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2839
2840 IEM_MC_ADVANCE_RIP();
2841 IEM_MC_END();
2842 }
2843 return VINF_SUCCESS;
2844
2845 default:
2846 return IEMOP_RAISE_INVALID_OPCODE();
2847 }
2848}
2849
2850
2851/** Opcode 0x0f 0x70. The immediate here is evil: it comes after the ModRM encoded operand, so the memory forms must calculate the effective address before fetching it. */
2852FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
2853{
2854 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2855 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2856 {
2857 case IEM_OP_PRF_SIZE_OP: /* SSE */
2858 case IEM_OP_PRF_REPNZ: /* SSE */
2859 case IEM_OP_PRF_REPZ: /* SSE */
2860 {
2861 PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
2862 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2863 {
2864 case IEM_OP_PRF_SIZE_OP:
2865 IEMOP_MNEMONIC(pshufd_Vdq_Wdq, "pshufd Vdq,Wdq,Ib");
2866 pfnAImpl = iemAImpl_pshufd;
2867 break;
2868 case IEM_OP_PRF_REPNZ:
2869 IEMOP_MNEMONIC(pshuflw_Vdq_Wdq, "pshuflw Vdq,Wdq,Ib");
2870 pfnAImpl = iemAImpl_pshuflw;
2871 break;
2872 case IEM_OP_PRF_REPZ:
2873 IEMOP_MNEMONIC(pshufhw_Vdq_Wdq, "pshufhw Vdq,Wdq,Ib");
2874 pfnAImpl = iemAImpl_pshufhw;
2875 break;
2876 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2877 }
2878 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2879 {
2880 /*
2881 * Register, register.
2882 */
2883 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2885
2886 IEM_MC_BEGIN(3, 0);
2887 IEM_MC_ARG(uint128_t *, pDst, 0);
2888 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2889 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2890 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2891 IEM_MC_PREPARE_SSE_USAGE();
2892 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2893 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2894 IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
2895 IEM_MC_ADVANCE_RIP();
2896 IEM_MC_END();
2897 }
2898 else
2899 {
2900 /*
2901 * Register, memory.
2902 */
2903 IEM_MC_BEGIN(3, 2);
2904 IEM_MC_ARG(uint128_t *, pDst, 0);
2905 IEM_MC_LOCAL(uint128_t, uSrc);
2906 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2907 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2908
2909                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2910 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2911 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2913 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2914
2915 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2916 IEM_MC_PREPARE_SSE_USAGE();
2917 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2918 IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
2919
2920 IEM_MC_ADVANCE_RIP();
2921 IEM_MC_END();
2922 }
2923 return VINF_SUCCESS;
2924 }
2925
2926 case 0: /* MMX Extension */
2927 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
2928 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2929 {
2930 /*
2931 * Register, register.
2932 */
2933 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2935
2936 IEM_MC_BEGIN(3, 0);
2937 IEM_MC_ARG(uint64_t *, pDst, 0);
2938 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2939 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2940 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2941 IEM_MC_PREPARE_FPU_USAGE();
2942 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2943 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2944 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2945 IEM_MC_ADVANCE_RIP();
2946 IEM_MC_END();
2947 }
2948 else
2949 {
2950 /*
2951 * Register, memory.
2952 */
2953 IEM_MC_BEGIN(3, 2);
2954 IEM_MC_ARG(uint64_t *, pDst, 0);
2955 IEM_MC_LOCAL(uint64_t, uSrc);
2956 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2957 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2958
2959            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2960 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2961 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2963 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2964
2965 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2966 IEM_MC_PREPARE_FPU_USAGE();
2967 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2968 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2969
2970 IEM_MC_ADVANCE_RIP();
2971 IEM_MC_END();
2972 }
2973 return VINF_SUCCESS;
2974
2975 default:
2976 return IEMOP_RAISE_INVALID_OPCODE();
2977 }
2978}
2979
2980
2981/** Opcode 0x0f 0x71 11/2. */
2982FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
2983
2984/** Opcode 0x66 0x0f 0x71 11/2. */
2985FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);
2986
2987/** Opcode 0x0f 0x71 11/4. */
2988FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
2989
2990/** Opcode 0x66 0x0f 0x71 11/4. */
2991FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);
2992
2993/** Opcode 0x0f 0x71 11/6. */
2994FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
2995
2996/** Opcode 0x66 0x0f 0x71 11/6. */
2997FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
2998
2999
3000/** Opcode 0x0f 0x71. */
3001FNIEMOP_DEF(iemOp_Grp12)
3002{
3003 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3004 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3005 return IEMOP_RAISE_INVALID_OPCODE();
3006 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3007 {
3008 case 0: case 1: case 3: case 5: case 7:
3009 return IEMOP_RAISE_INVALID_OPCODE();
3010 case 2:
3011 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3012 {
3013 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
3014 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
3015 default: return IEMOP_RAISE_INVALID_OPCODE();
3016 }
3017 case 4:
3018 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3019 {
3020 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
3021 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
3022 default: return IEMOP_RAISE_INVALID_OPCODE();
3023 }
3024 case 6:
3025 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3026 {
3027 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
3028 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
3029 default: return IEMOP_RAISE_INVALID_OPCODE();
3030 }
3031 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3032 }
3033}
3034
3035
3036/** Opcode 0x0f 0x72 11/2. */
3037FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3038
3039/** Opcode 0x66 0x0f 0x72 11/2. */
3040FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);
3041
3042/** Opcode 0x0f 0x72 11/4. */
3043FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3044
3045/** Opcode 0x66 0x0f 0x72 11/4. */
3046FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);
3047
3048/** Opcode 0x0f 0x72 11/6. */
3049FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3050
3051/** Opcode 0x66 0x0f 0x72 11/6. */
3052FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
3053
3054
3055/** Opcode 0x0f 0x72. */
3056FNIEMOP_DEF(iemOp_Grp13)
3057{
3058 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3059 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3060 return IEMOP_RAISE_INVALID_OPCODE();
3061 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3062 {
3063 case 0: case 1: case 3: case 5: case 7:
3064 return IEMOP_RAISE_INVALID_OPCODE();
3065 case 2:
3066 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3067 {
3068 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
3069 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
3070 default: return IEMOP_RAISE_INVALID_OPCODE();
3071 }
3072 case 4:
3073 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3074 {
3075 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
3076 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
3077 default: return IEMOP_RAISE_INVALID_OPCODE();
3078 }
3079 case 6:
3080 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3081 {
3082 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
3083 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
3084 default: return IEMOP_RAISE_INVALID_OPCODE();
3085 }
3086 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3087 }
3088}
3089
3090
3091/** Opcode 0x0f 0x73 11/2. */
3092FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3093
3094/** Opcode 0x66 0x0f 0x73 11/2. */
3095FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);
3096
3097/** Opcode 0x66 0x0f 0x73 11/3. */
3098FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT
3099
3100/** Opcode 0x0f 0x73 11/6. */
3101FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3102
3103/** Opcode 0x66 0x0f 0x73 11/6. */
3104FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);
3105
3106/** Opcode 0x66 0x0f 0x73 11/7. */
3107FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
3108
3109
3110/** Opcode 0x0f 0x73. */
3111FNIEMOP_DEF(iemOp_Grp14)
3112{
3113 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3114 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3115 return IEMOP_RAISE_INVALID_OPCODE();
3116 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3117 {
3118 case 0: case 1: case 4: case 5:
3119 return IEMOP_RAISE_INVALID_OPCODE();
3120 case 2:
3121 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3122 {
3123 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
3124 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
3125 default: return IEMOP_RAISE_INVALID_OPCODE();
3126 }
3127 case 3:
3128 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3129 {
3130 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
3131 default: return IEMOP_RAISE_INVALID_OPCODE();
3132 }
3133 case 6:
3134 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3135 {
3136 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
3137 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
3138 default: return IEMOP_RAISE_INVALID_OPCODE();
3139 }
3140 case 7:
3141 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3142 {
3143 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
3144 default: return IEMOP_RAISE_INVALID_OPCODE();
3145 }
3146 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3147 }
3148}
3149
3150
3151/**
3152 * Common worker for SSE2 and MMX instructions on the forms:
3153 * pxxx mm1, mm2/mem64
3154 * pxxx xmm1, xmm2/mem128
3155 *
3156 * Proper alignment of the 128-bit operand is enforced.
3157 * Exceptions type 4. SSE2 and MMX cpuid checks.
3158 */
3159FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3160{
3161 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
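    /* 66h selects the 128-bit SSE form, no prefix the 64-bit MMX form. */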
3162 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3163 {
3164 case IEM_OP_PRF_SIZE_OP: /* SSE */
3165 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3166 {
3167 /*
3168 * Register, register.
3169 */
3170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3171 IEM_MC_BEGIN(2, 0);
3172 IEM_MC_ARG(uint128_t *, pDst, 0);
3173 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3174 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3175 IEM_MC_PREPARE_SSE_USAGE();
3176 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3177 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3178 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3179 IEM_MC_ADVANCE_RIP();
3180 IEM_MC_END();
3181 }
3182 else
3183 {
3184 /*
3185 * Register, memory.
3186 */
3187 IEM_MC_BEGIN(2, 2);
3188 IEM_MC_ARG(uint128_t *, pDst, 0);
3189 IEM_MC_LOCAL(uint128_t, uSrc);
3190 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3192
3193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3195 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3196 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3197
3198 IEM_MC_PREPARE_SSE_USAGE();
3199 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3200 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3201
3202 IEM_MC_ADVANCE_RIP();
3203 IEM_MC_END();
3204 }
3205 return VINF_SUCCESS;
3206
3207 case 0: /* MMX */
3208 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3209 {
3210 /*
3211 * Register, register.
3212 */
3213 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3214 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3216 IEM_MC_BEGIN(2, 0);
3217 IEM_MC_ARG(uint64_t *, pDst, 0);
3218 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3219 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3220 IEM_MC_PREPARE_FPU_USAGE();
3221 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3222 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3223 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3224 IEM_MC_ADVANCE_RIP();
3225 IEM_MC_END();
3226 }
3227 else
3228 {
3229 /*
3230 * Register, memory.
3231 */
3232 IEM_MC_BEGIN(2, 2);
3233 IEM_MC_ARG(uint64_t *, pDst, 0);
3234 IEM_MC_LOCAL(uint64_t, uSrc);
3235 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3236 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3237
3238 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3240 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3241 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3242
3243 IEM_MC_PREPARE_FPU_USAGE();
3244 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3245 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3246
3247 IEM_MC_ADVANCE_RIP();
3248 IEM_MC_END();
3249 }
3250 return VINF_SUCCESS;
3251
3252 default:
3253 return IEMOP_RAISE_INVALID_OPCODE();
3254 }
3255}
3256
3257
3258/** Opcode 0x0f 0x74. */
3259FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
3260{
3261 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3262 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3263}
3264
3265
3266/** Opcode 0x0f 0x75. */
3267FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
3268{
3269 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3270 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3271}
3272
3273
3274/** Opcode 0x0f 0x76. */
3275FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
3276{
3277 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3278 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3279}
3280
3281
3282/** Opcode 0x0f 0x77. */
3283FNIEMOP_STUB(iemOp_emms);
3284/** Opcode 0x0f 0x78. */
3285FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17);
3286/** Opcode 0x0f 0x79. */
3287FNIEMOP_UD_STUB(iemOp_vmwrite);
3288/** Opcode 0x0f 0x7c. */
3289FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);
3290/** Opcode 0x0f 0x7d. */
3291FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
3292
3293
3294/** Opcode 0x0f 0x7e. */
3295FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
3296{
3297 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3298 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3299 {
3300 case IEM_OP_PRF_SIZE_OP: /* SSE */
3301 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3302 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
3303 else
3304 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
3305 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3306 {
3307 /* greg, XMM */
3308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3309 IEM_MC_BEGIN(0, 1);
3310 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3311 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3312 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3313 {
3314 IEM_MC_LOCAL(uint64_t, u64Tmp);
3315 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3316 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3317 }
3318 else
3319 {
3320 IEM_MC_LOCAL(uint32_t, u32Tmp);
3321 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3322 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3323 }
3324 IEM_MC_ADVANCE_RIP();
3325 IEM_MC_END();
3326 }
3327 else
3328 {
3329 /* [mem], XMM */
3330 IEM_MC_BEGIN(0, 2);
3331 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3332 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3333            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3335 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3336 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3337 {
3338 IEM_MC_LOCAL(uint64_t, u64Tmp);
3339 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3340 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3341 }
3342 else
3343 {
3344 IEM_MC_LOCAL(uint32_t, u32Tmp);
3345 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3346 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3347 }
3348 IEM_MC_ADVANCE_RIP();
3349 IEM_MC_END();
3350 }
3351 return VINF_SUCCESS;
3352
3353 case 0: /* MMX */
3354 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3355 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3356 else
3357 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3358 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3359 {
3360 /* greg, MMX */
3361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3362 IEM_MC_BEGIN(0, 1);
3363 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3364 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3365 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3366 {
3367 IEM_MC_LOCAL(uint64_t, u64Tmp);
3368 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3369 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3370 }
3371 else
3372 {
3373 IEM_MC_LOCAL(uint32_t, u32Tmp);
3374 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3375 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3376 }
3377 IEM_MC_ADVANCE_RIP();
3378 IEM_MC_END();
3379 }
3380 else
3381 {
3382 /* [mem], MMX */
3383 IEM_MC_BEGIN(0, 2);
3384 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3385 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3386            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3388 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3389 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3390 {
3391 IEM_MC_LOCAL(uint64_t, u64Tmp);
3392 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3393 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3394 }
3395 else
3396 {
3397 IEM_MC_LOCAL(uint32_t, u32Tmp);
3398 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3399 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3400 }
3401 IEM_MC_ADVANCE_RIP();
3402 IEM_MC_END();
3403 }
3404 return VINF_SUCCESS;
3405
3406 default:
3407 return IEMOP_RAISE_INVALID_OPCODE();
3408 }
3409}
3410
3411
3412/** Opcode 0x0f 0x7f. */
3413FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
3414{
3415 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3416 bool fAligned = false;
3417 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3418 {
3419 case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
3420 fAligned = true;
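            /* fall thru */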
3421 case IEM_OP_PRF_REPZ: /* SSE unaligned */
3422 if (fAligned)
3423 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wdq,Vdq");
3424 else
3425 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wdq,Vdq");
3426 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3427 {
3428 /*
3429 * Register, register.
3430 */
3431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3432 IEM_MC_BEGIN(0, 0);
3433 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3434 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3435 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3436 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3437 IEM_MC_ADVANCE_RIP();
3438 IEM_MC_END();
3439 }
3440 else
3441 {
3442 /*
3443 * Register, memory.
3444 */
3445 IEM_MC_BEGIN(0, 2);
3446 IEM_MC_LOCAL(uint128_t, u128Tmp);
3447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3448
3449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3451 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3452 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3453
3454 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3455 if (fAligned)
3456 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3457 else
3458 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3459
3460 IEM_MC_ADVANCE_RIP();
3461 IEM_MC_END();
3462 }
3463 return VINF_SUCCESS;
3464
3465 case 0: /* MMX */
3466 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3467
3468 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3469 {
3470 /*
3471 * Register, register.
3472 */
3473 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3474 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3476 IEM_MC_BEGIN(0, 1);
3477 IEM_MC_LOCAL(uint64_t, u64Tmp);
3478 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3479 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3480 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3481 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3482 IEM_MC_ADVANCE_RIP();
3483 IEM_MC_END();
3484 }
3485 else
3486 {
3487 /*
3488 * Register, memory.
3489 */
3490 IEM_MC_BEGIN(0, 2);
3491 IEM_MC_LOCAL(uint64_t, u64Tmp);
3492 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3493
3494 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3496 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3497 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3498
3499 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3500 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3501
3502 IEM_MC_ADVANCE_RIP();
3503 IEM_MC_END();
3504 }
3505 return VINF_SUCCESS;
3506
3507 default:
3508 return IEMOP_RAISE_INVALID_OPCODE();
3509 }
3510}
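/**
 * Note! Illustrative sketch only, not build code: the movdqa and movdqu
 *       paths above differ only in the store; the aligned variant must
 *       fault on a misaligned effective address, which is what the
 *       _ALIGN_SSE store variant checks. Roughly, under that assumption
 *       (helper name made up for the example):
 */
#if 0
static int iemSketchMovdqaAddrOk(uint64_t GCPtrEff)
{
    return (GCPtrEff & 15) == 0; /* movdqa requires 16-byte alignment, else #GP */
}
#endif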
3511
3512
3513
3514/** Opcode 0x0f 0x80. */
3515FNIEMOP_DEF(iemOp_jo_Jv)
3516{
3517 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
3518 IEMOP_HLP_MIN_386();
3519 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3520 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3521 {
3522 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3524
3525 IEM_MC_BEGIN(0, 0);
3526 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3527 IEM_MC_REL_JMP_S16(i16Imm);
3528 } IEM_MC_ELSE() {
3529 IEM_MC_ADVANCE_RIP();
3530 } IEM_MC_ENDIF();
3531 IEM_MC_END();
3532 }
3533 else
3534 {
3535 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3537
3538 IEM_MC_BEGIN(0, 0);
3539 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3540 IEM_MC_REL_JMP_S32(i32Imm);
3541 } IEM_MC_ELSE() {
3542 IEM_MC_ADVANCE_RIP();
3543 } IEM_MC_ENDIF();
3544 IEM_MC_END();
3545 }
3546 return VINF_SUCCESS;
3547}
3548
3549
3550/** Opcode 0x0f 0x81. */
3551FNIEMOP_DEF(iemOp_jno_Jv)
3552{
3553 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
3554 IEMOP_HLP_MIN_386();
3555 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3556 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3557 {
3558 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3560
3561 IEM_MC_BEGIN(0, 0);
3562 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3563 IEM_MC_ADVANCE_RIP();
3564 } IEM_MC_ELSE() {
3565 IEM_MC_REL_JMP_S16(i16Imm);
3566 } IEM_MC_ENDIF();
3567 IEM_MC_END();
3568 }
3569 else
3570 {
3571 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3573
3574 IEM_MC_BEGIN(0, 0);
3575 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3576 IEM_MC_ADVANCE_RIP();
3577 } IEM_MC_ELSE() {
3578 IEM_MC_REL_JMP_S32(i32Imm);
3579 } IEM_MC_ENDIF();
3580 IEM_MC_END();
3581 }
3582 return VINF_SUCCESS;
3583}
3584
3585
3586/** Opcode 0x0f 0x82. */
3587FNIEMOP_DEF(iemOp_jc_Jv)
3588{
3589 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
3590 IEMOP_HLP_MIN_386();
3591 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3592 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3593 {
3594 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3596
3597 IEM_MC_BEGIN(0, 0);
3598 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3599 IEM_MC_REL_JMP_S16(i16Imm);
3600 } IEM_MC_ELSE() {
3601 IEM_MC_ADVANCE_RIP();
3602 } IEM_MC_ENDIF();
3603 IEM_MC_END();
3604 }
3605 else
3606 {
3607 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3609
3610 IEM_MC_BEGIN(0, 0);
3611 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3612 IEM_MC_REL_JMP_S32(i32Imm);
3613 } IEM_MC_ELSE() {
3614 IEM_MC_ADVANCE_RIP();
3615 } IEM_MC_ENDIF();
3616 IEM_MC_END();
3617 }
3618 return VINF_SUCCESS;
3619}
3620
3621
3622/** Opcode 0x0f 0x83. */
3623FNIEMOP_DEF(iemOp_jnc_Jv)
3624{
3625 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
3626 IEMOP_HLP_MIN_386();
3627 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3628 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3629 {
3630 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3632
3633 IEM_MC_BEGIN(0, 0);
3634 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3635 IEM_MC_ADVANCE_RIP();
3636 } IEM_MC_ELSE() {
3637 IEM_MC_REL_JMP_S16(i16Imm);
3638 } IEM_MC_ENDIF();
3639 IEM_MC_END();
3640 }
3641 else
3642 {
3643 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3645
3646 IEM_MC_BEGIN(0, 0);
3647 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3648 IEM_MC_ADVANCE_RIP();
3649 } IEM_MC_ELSE() {
3650 IEM_MC_REL_JMP_S32(i32Imm);
3651 } IEM_MC_ENDIF();
3652 IEM_MC_END();
3653 }
3654 return VINF_SUCCESS;
3655}
3656
3657
3658/** Opcode 0x0f 0x84. */
3659FNIEMOP_DEF(iemOp_je_Jv)
3660{
3661 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
3662 IEMOP_HLP_MIN_386();
3663 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3664 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3665 {
3666 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3668
3669 IEM_MC_BEGIN(0, 0);
3670 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3671 IEM_MC_REL_JMP_S16(i16Imm);
3672 } IEM_MC_ELSE() {
3673 IEM_MC_ADVANCE_RIP();
3674 } IEM_MC_ENDIF();
3675 IEM_MC_END();
3676 }
3677 else
3678 {
3679 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3681
3682 IEM_MC_BEGIN(0, 0);
3683 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3684 IEM_MC_REL_JMP_S32(i32Imm);
3685 } IEM_MC_ELSE() {
3686 IEM_MC_ADVANCE_RIP();
3687 } IEM_MC_ENDIF();
3688 IEM_MC_END();
3689 }
3690 return VINF_SUCCESS;
3691}
3692
3693
3694/** Opcode 0x0f 0x85. */
3695FNIEMOP_DEF(iemOp_jne_Jv)
3696{
3697 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
3698 IEMOP_HLP_MIN_386();
3699 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3700 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3701 {
3702 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3704
3705 IEM_MC_BEGIN(0, 0);
3706 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3707 IEM_MC_ADVANCE_RIP();
3708 } IEM_MC_ELSE() {
3709 IEM_MC_REL_JMP_S16(i16Imm);
3710 } IEM_MC_ENDIF();
3711 IEM_MC_END();
3712 }
3713 else
3714 {
3715 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3717
3718 IEM_MC_BEGIN(0, 0);
3719 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3720 IEM_MC_ADVANCE_RIP();
3721 } IEM_MC_ELSE() {
3722 IEM_MC_REL_JMP_S32(i32Imm);
3723 } IEM_MC_ENDIF();
3724 IEM_MC_END();
3725 }
3726 return VINF_SUCCESS;
3727}
3728
3729
3730/** Opcode 0x0f 0x86. */
3731FNIEMOP_DEF(iemOp_jbe_Jv)
3732{
3733 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
3734 IEMOP_HLP_MIN_386();
3735 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3736 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3737 {
3738 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3740
3741 IEM_MC_BEGIN(0, 0);
3742 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3743 IEM_MC_REL_JMP_S16(i16Imm);
3744 } IEM_MC_ELSE() {
3745 IEM_MC_ADVANCE_RIP();
3746 } IEM_MC_ENDIF();
3747 IEM_MC_END();
3748 }
3749 else
3750 {
3751 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3753
3754 IEM_MC_BEGIN(0, 0);
3755 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3756 IEM_MC_REL_JMP_S32(i32Imm);
3757 } IEM_MC_ELSE() {
3758 IEM_MC_ADVANCE_RIP();
3759 } IEM_MC_ENDIF();
3760 IEM_MC_END();
3761 }
3762 return VINF_SUCCESS;
3763}
3764
3765
3766/** Opcode 0x0f 0x87. */
3767FNIEMOP_DEF(iemOp_jnbe_Jv)
3768{
3769 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
3770 IEMOP_HLP_MIN_386();
3771 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3772 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3773 {
3774 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3776
3777 IEM_MC_BEGIN(0, 0);
3778 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3779 IEM_MC_ADVANCE_RIP();
3780 } IEM_MC_ELSE() {
3781 IEM_MC_REL_JMP_S16(i16Imm);
3782 } IEM_MC_ENDIF();
3783 IEM_MC_END();
3784 }
3785 else
3786 {
3787 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3789
3790 IEM_MC_BEGIN(0, 0);
3791 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3792 IEM_MC_ADVANCE_RIP();
3793 } IEM_MC_ELSE() {
3794 IEM_MC_REL_JMP_S32(i32Imm);
3795 } IEM_MC_ENDIF();
3796 IEM_MC_END();
3797 }
3798 return VINF_SUCCESS;
3799}
3800
3801
3802/** Opcode 0x0f 0x88. */
3803FNIEMOP_DEF(iemOp_js_Jv)
3804{
3805 IEMOP_MNEMONIC(js_Jv, "js Jv");
3806 IEMOP_HLP_MIN_386();
3807 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3808 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3809 {
3810 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3812
3813 IEM_MC_BEGIN(0, 0);
3814 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3815 IEM_MC_REL_JMP_S16(i16Imm);
3816 } IEM_MC_ELSE() {
3817 IEM_MC_ADVANCE_RIP();
3818 } IEM_MC_ENDIF();
3819 IEM_MC_END();
3820 }
3821 else
3822 {
3823 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3825
3826 IEM_MC_BEGIN(0, 0);
3827 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3828 IEM_MC_REL_JMP_S32(i32Imm);
3829 } IEM_MC_ELSE() {
3830 IEM_MC_ADVANCE_RIP();
3831 } IEM_MC_ENDIF();
3832 IEM_MC_END();
3833 }
3834 return VINF_SUCCESS;
3835}
3836
3837
3838/** Opcode 0x0f 0x89. */
3839FNIEMOP_DEF(iemOp_jns_Jv)
3840{
3841 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
3842 IEMOP_HLP_MIN_386();
3843 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3844 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3845 {
3846 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3847 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3848
3849 IEM_MC_BEGIN(0, 0);
3850 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3851 IEM_MC_ADVANCE_RIP();
3852 } IEM_MC_ELSE() {
3853 IEM_MC_REL_JMP_S16(i16Imm);
3854 } IEM_MC_ENDIF();
3855 IEM_MC_END();
3856 }
3857 else
3858 {
3859 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3861
3862 IEM_MC_BEGIN(0, 0);
3863 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3864 IEM_MC_ADVANCE_RIP();
3865 } IEM_MC_ELSE() {
3866 IEM_MC_REL_JMP_S32(i32Imm);
3867 } IEM_MC_ENDIF();
3868 IEM_MC_END();
3869 }
3870 return VINF_SUCCESS;
3871}
3872
3873
3874/** Opcode 0x0f 0x8a. */
3875FNIEMOP_DEF(iemOp_jp_Jv)
3876{
3877 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
3878 IEMOP_HLP_MIN_386();
3879 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3880 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3881 {
3882 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3884
3885 IEM_MC_BEGIN(0, 0);
3886 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3887 IEM_MC_REL_JMP_S16(i16Imm);
3888 } IEM_MC_ELSE() {
3889 IEM_MC_ADVANCE_RIP();
3890 } IEM_MC_ENDIF();
3891 IEM_MC_END();
3892 }
3893 else
3894 {
3895 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3897
3898 IEM_MC_BEGIN(0, 0);
3899 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3900 IEM_MC_REL_JMP_S32(i32Imm);
3901 } IEM_MC_ELSE() {
3902 IEM_MC_ADVANCE_RIP();
3903 } IEM_MC_ENDIF();
3904 IEM_MC_END();
3905 }
3906 return VINF_SUCCESS;
3907}
3908
3909
3910/** Opcode 0x0f 0x8b. */
3911FNIEMOP_DEF(iemOp_jnp_Jv)
3912{
3913 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
3914 IEMOP_HLP_MIN_386();
3915 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3916 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3917 {
3918 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3920
3921 IEM_MC_BEGIN(0, 0);
3922 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3923 IEM_MC_ADVANCE_RIP();
3924 } IEM_MC_ELSE() {
3925 IEM_MC_REL_JMP_S16(i16Imm);
3926 } IEM_MC_ENDIF();
3927 IEM_MC_END();
3928 }
3929 else
3930 {
3931 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3933
3934 IEM_MC_BEGIN(0, 0);
3935 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3936 IEM_MC_ADVANCE_RIP();
3937 } IEM_MC_ELSE() {
3938 IEM_MC_REL_JMP_S32(i32Imm);
3939 } IEM_MC_ENDIF();
3940 IEM_MC_END();
3941 }
3942 return VINF_SUCCESS;
3943}
3944
3945
3946/** Opcode 0x0f 0x8c. */
3947FNIEMOP_DEF(iemOp_jl_Jv)
3948{
3949 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
3950 IEMOP_HLP_MIN_386();
3951 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3952 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3953 {
3954 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3956
3957 IEM_MC_BEGIN(0, 0);
3958 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3959 IEM_MC_REL_JMP_S16(i16Imm);
3960 } IEM_MC_ELSE() {
3961 IEM_MC_ADVANCE_RIP();
3962 } IEM_MC_ENDIF();
3963 IEM_MC_END();
3964 }
3965 else
3966 {
3967 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3969
3970 IEM_MC_BEGIN(0, 0);
3971 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3972 IEM_MC_REL_JMP_S32(i32Imm);
3973 } IEM_MC_ELSE() {
3974 IEM_MC_ADVANCE_RIP();
3975 } IEM_MC_ENDIF();
3976 IEM_MC_END();
3977 }
3978 return VINF_SUCCESS;
3979}
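/**
 * Note! Illustrative sketch only, not build code: for the signed Jcc
 *       family (jl/jnl/jle/jnle), SF != OF means "less" because OF marks
 *       a subtraction whose sign bit came out wrong; XOR-ing OF into SF
 *       recovers the true sign of the compared difference. Helper name
 *       made up for the example.
 */
#if 0
static int iemSketchIsLessSigned(unsigned fSf, unsigned fOf)
{
    /* True sign of (dst - src): SF, flipped when the subtraction overflowed. */
    return (fSf ^ fOf) != 0;
}
#endif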
3980
3981
3982/** Opcode 0x0f 0x8d. */
3983FNIEMOP_DEF(iemOp_jnl_Jv)
3984{
3985 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
3986 IEMOP_HLP_MIN_386();
3987 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3988 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3989 {
3990 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3992
3993 IEM_MC_BEGIN(0, 0);
3994 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3995 IEM_MC_ADVANCE_RIP();
3996 } IEM_MC_ELSE() {
3997 IEM_MC_REL_JMP_S16(i16Imm);
3998 } IEM_MC_ENDIF();
3999 IEM_MC_END();
4000 }
4001 else
4002 {
4003 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4005
4006 IEM_MC_BEGIN(0, 0);
4007 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4008 IEM_MC_ADVANCE_RIP();
4009 } IEM_MC_ELSE() {
4010 IEM_MC_REL_JMP_S32(i32Imm);
4011 } IEM_MC_ENDIF();
4012 IEM_MC_END();
4013 }
4014 return VINF_SUCCESS;
4015}
4016
4017
4018/** Opcode 0x0f 0x8e. */
4019FNIEMOP_DEF(iemOp_jle_Jv)
4020{
4021 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4022 IEMOP_HLP_MIN_386();
4023 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4024 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4025 {
4026 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4028
4029 IEM_MC_BEGIN(0, 0);
4030 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4031 IEM_MC_REL_JMP_S16(i16Imm);
4032 } IEM_MC_ELSE() {
4033 IEM_MC_ADVANCE_RIP();
4034 } IEM_MC_ENDIF();
4035 IEM_MC_END();
4036 }
4037 else
4038 {
4039 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4041
4042 IEM_MC_BEGIN(0, 0);
4043 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4044 IEM_MC_REL_JMP_S32(i32Imm);
4045 } IEM_MC_ELSE() {
4046 IEM_MC_ADVANCE_RIP();
4047 } IEM_MC_ENDIF();
4048 IEM_MC_END();
4049 }
4050 return VINF_SUCCESS;
4051}
4052
4053
4054/** Opcode 0x0f 0x8f. */
4055FNIEMOP_DEF(iemOp_jnle_Jv)
4056{
4057 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4058 IEMOP_HLP_MIN_386();
4059 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4060 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4061 {
4062 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4064
4065 IEM_MC_BEGIN(0, 0);
4066 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4067 IEM_MC_ADVANCE_RIP();
4068 } IEM_MC_ELSE() {
4069 IEM_MC_REL_JMP_S16(i16Imm);
4070 } IEM_MC_ENDIF();
4071 IEM_MC_END();
4072 }
4073 else
4074 {
4075 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4077
4078 IEM_MC_BEGIN(0, 0);
4079 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4080 IEM_MC_ADVANCE_RIP();
4081 } IEM_MC_ELSE() {
4082 IEM_MC_REL_JMP_S32(i32Imm);
4083 } IEM_MC_ENDIF();
4084 IEM_MC_END();
4085 }
4086 return VINF_SUCCESS;
4087}
4088
4089
4090/** Opcode 0x0f 0x90. */
4091FNIEMOP_DEF(iemOp_seto_Eb)
4092{
4093 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4094 IEMOP_HLP_MIN_386();
4095 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4096
4097 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4098 * any way. AMD says it's "unused", whatever that means. We're
4099 * ignoring for now. */
4100 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4101 {
4102 /* register target */
4103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4104 IEM_MC_BEGIN(0, 0);
4105 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4106 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4107 } IEM_MC_ELSE() {
4108 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4109 } IEM_MC_ENDIF();
4110 IEM_MC_ADVANCE_RIP();
4111 IEM_MC_END();
4112 }
4113 else
4114 {
4115 /* memory target */
4116 IEM_MC_BEGIN(0, 1);
4117 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4118 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4120 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4121 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4122 } IEM_MC_ELSE() {
4123 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4124 } IEM_MC_ENDIF();
4125 IEM_MC_ADVANCE_RIP();
4126 IEM_MC_END();
4127 }
4128 return VINF_SUCCESS;
4129}
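/**
 * Note! Illustrative sketch only, not build code: every setcc encoding
 *       below follows the same shape, always storing a full byte of 0 or
 *       1 whatever the condition. Helper name made up for the example.
 */
#if 0
static uint8_t iemSketchSetcc(int fCond)
{
    return fCond ? (uint8_t)1 : (uint8_t)0; /* byte-sized store, never a single bit */
}
#endif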
4130
4131
4132/** Opcode 0x0f 0x91. */
4133FNIEMOP_DEF(iemOp_setno_Eb)
4134{
4135 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4136 IEMOP_HLP_MIN_386();
4137 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4138
4139 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4140 * any way. AMD says it's "unused", whatever that means. We're
4141 * ignoring for now. */
4142 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4143 {
4144 /* register target */
4145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4146 IEM_MC_BEGIN(0, 0);
4147 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4148 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4149 } IEM_MC_ELSE() {
4150 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4151 } IEM_MC_ENDIF();
4152 IEM_MC_ADVANCE_RIP();
4153 IEM_MC_END();
4154 }
4155 else
4156 {
4157 /* memory target */
4158 IEM_MC_BEGIN(0, 1);
4159 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4162 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4163 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4164 } IEM_MC_ELSE() {
4165 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4166 } IEM_MC_ENDIF();
4167 IEM_MC_ADVANCE_RIP();
4168 IEM_MC_END();
4169 }
4170 return VINF_SUCCESS;
4171}
4172
4173
4174/** Opcode 0x0f 0x92. */
4175FNIEMOP_DEF(iemOp_setc_Eb)
4176{
4177 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4178 IEMOP_HLP_MIN_386();
4179 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4180
4181 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4182 * any way. AMD says it's "unused", whatever that means. We're
4183 * ignoring for now. */
4184 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4185 {
4186 /* register target */
4187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4188 IEM_MC_BEGIN(0, 0);
4189 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4190 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4191 } IEM_MC_ELSE() {
4192 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4193 } IEM_MC_ENDIF();
4194 IEM_MC_ADVANCE_RIP();
4195 IEM_MC_END();
4196 }
4197 else
4198 {
4199 /* memory target */
4200 IEM_MC_BEGIN(0, 1);
4201 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4204 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4205 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4206 } IEM_MC_ELSE() {
4207 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4208 } IEM_MC_ENDIF();
4209 IEM_MC_ADVANCE_RIP();
4210 IEM_MC_END();
4211 }
4212 return VINF_SUCCESS;
4213}
4214
4215
4216/** Opcode 0x0f 0x93. */
4217FNIEMOP_DEF(iemOp_setnc_Eb)
4218{
4219 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4220 IEMOP_HLP_MIN_386();
4221 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4222
4223 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4224 * any way. AMD says it's "unused", whatever that means. We're
4225 * ignoring for now. */
4226 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4227 {
4228 /* register target */
4229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4230 IEM_MC_BEGIN(0, 0);
4231 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4232 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4233 } IEM_MC_ELSE() {
4234 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4235 } IEM_MC_ENDIF();
4236 IEM_MC_ADVANCE_RIP();
4237 IEM_MC_END();
4238 }
4239 else
4240 {
4241 /* memory target */
4242 IEM_MC_BEGIN(0, 1);
4243 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4244 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4246 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4247 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4248 } IEM_MC_ELSE() {
4249 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4250 } IEM_MC_ENDIF();
4251 IEM_MC_ADVANCE_RIP();
4252 IEM_MC_END();
4253 }
4254 return VINF_SUCCESS;
4255}
4256
4257
4258/** Opcode 0x0f 0x94. */
4259FNIEMOP_DEF(iemOp_sete_Eb)
4260{
4261 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4262 IEMOP_HLP_MIN_386();
4263 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4264
4265 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4266 * any way. AMD says it's "unused", whatever that means. We're
4267 * ignoring for now. */
4268 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4269 {
4270 /* register target */
4271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4272 IEM_MC_BEGIN(0, 0);
4273 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4274 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4275 } IEM_MC_ELSE() {
4276 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4277 } IEM_MC_ENDIF();
4278 IEM_MC_ADVANCE_RIP();
4279 IEM_MC_END();
4280 }
4281 else
4282 {
4283 /* memory target */
4284 IEM_MC_BEGIN(0, 1);
4285 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4286 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4288 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4289 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4290 } IEM_MC_ELSE() {
4291 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4292 } IEM_MC_ENDIF();
4293 IEM_MC_ADVANCE_RIP();
4294 IEM_MC_END();
4295 }
4296 return VINF_SUCCESS;
4297}
4298
4299
4300/** Opcode 0x0f 0x95. */
4301FNIEMOP_DEF(iemOp_setne_Eb)
4302{
4303 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4304 IEMOP_HLP_MIN_386();
4305 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4306
4307 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4308 * any way. AMD says it's "unused", whatever that means. We're
4309 * ignoring for now. */
4310 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4311 {
4312 /* register target */
4313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4314 IEM_MC_BEGIN(0, 0);
4315 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4316 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4317 } IEM_MC_ELSE() {
4318 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4319 } IEM_MC_ENDIF();
4320 IEM_MC_ADVANCE_RIP();
4321 IEM_MC_END();
4322 }
4323 else
4324 {
4325 /* memory target */
4326 IEM_MC_BEGIN(0, 1);
4327 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4330 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4331 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4332 } IEM_MC_ELSE() {
4333 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4334 } IEM_MC_ENDIF();
4335 IEM_MC_ADVANCE_RIP();
4336 IEM_MC_END();
4337 }
4338 return VINF_SUCCESS;
4339}
4340
4341
4342/** Opcode 0x0f 0x96. */
4343FNIEMOP_DEF(iemOp_setbe_Eb)
4344{
4345 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4346 IEMOP_HLP_MIN_386();
4347 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4348
4349 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4350 * any way. AMD says it's "unused", whatever that means. We're
4351 * ignoring for now. */
4352 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4353 {
4354 /* register target */
4355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4356 IEM_MC_BEGIN(0, 0);
4357 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4358 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4359 } IEM_MC_ELSE() {
4360 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4361 } IEM_MC_ENDIF();
4362 IEM_MC_ADVANCE_RIP();
4363 IEM_MC_END();
4364 }
4365 else
4366 {
4367 /* memory target */
4368 IEM_MC_BEGIN(0, 1);
4369 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4370 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4372 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4373 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4374 } IEM_MC_ELSE() {
4375 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4376 } IEM_MC_ENDIF();
4377 IEM_MC_ADVANCE_RIP();
4378 IEM_MC_END();
4379 }
4380 return VINF_SUCCESS;
4381}
4382
4383
4384/** Opcode 0x0f 0x97. */
4385FNIEMOP_DEF(iemOp_setnbe_Eb)
4386{
4387 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4388 IEMOP_HLP_MIN_386();
4389 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4390
4391 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4392 * any way. AMD says it's "unused", whatever that means. We're
4393 * ignoring for now. */
4394 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4395 {
4396 /* register target */
4397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4398 IEM_MC_BEGIN(0, 0);
4399 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4400 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4401 } IEM_MC_ELSE() {
4402 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4403 } IEM_MC_ENDIF();
4404 IEM_MC_ADVANCE_RIP();
4405 IEM_MC_END();
4406 }
4407 else
4408 {
4409 /* memory target */
4410 IEM_MC_BEGIN(0, 1);
4411 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4412 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4414 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4415 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4416 } IEM_MC_ELSE() {
4417 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4418 } IEM_MC_ENDIF();
4419 IEM_MC_ADVANCE_RIP();
4420 IEM_MC_END();
4421 }
4422 return VINF_SUCCESS;
4423}
4424
4425
4426/** Opcode 0x0f 0x98. */
4427FNIEMOP_DEF(iemOp_sets_Eb)
4428{
4429 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
4430 IEMOP_HLP_MIN_386();
4431 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4432
4433 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4434 * any way. AMD says it's "unused", whatever that means. We're
4435 * ignoring for now. */
4436 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4437 {
4438 /* register target */
4439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4440 IEM_MC_BEGIN(0, 0);
4441 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4442 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4443 } IEM_MC_ELSE() {
4444 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4445 } IEM_MC_ENDIF();
4446 IEM_MC_ADVANCE_RIP();
4447 IEM_MC_END();
4448 }
4449 else
4450 {
4451 /* memory target */
4452 IEM_MC_BEGIN(0, 1);
4453 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4454 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4456 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4457 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4458 } IEM_MC_ELSE() {
4459 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4460 } IEM_MC_ENDIF();
4461 IEM_MC_ADVANCE_RIP();
4462 IEM_MC_END();
4463 }
4464 return VINF_SUCCESS;
4465}
4466
4467
4468/** Opcode 0x0f 0x99. */
4469FNIEMOP_DEF(iemOp_setns_Eb)
4470{
4471 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
4472 IEMOP_HLP_MIN_386();
4473 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4474
4475 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4476 * any way. AMD says it's "unused", whatever that means. We're
4477 * ignoring for now. */
4478 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4479 {
4480 /* register target */
4481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4482 IEM_MC_BEGIN(0, 0);
4483 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4484 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4485 } IEM_MC_ELSE() {
4486 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4487 } IEM_MC_ENDIF();
4488 IEM_MC_ADVANCE_RIP();
4489 IEM_MC_END();
4490 }
4491 else
4492 {
4493 /* memory target */
4494 IEM_MC_BEGIN(0, 1);
4495 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4496 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4498 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4499 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4500 } IEM_MC_ELSE() {
4501 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4502 } IEM_MC_ENDIF();
4503 IEM_MC_ADVANCE_RIP();
4504 IEM_MC_END();
4505 }
4506 return VINF_SUCCESS;
4507}
4508
4509
4510/** Opcode 0x0f 0x9a. */
4511FNIEMOP_DEF(iemOp_setp_Eb)
4512{
4513 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
4514 IEMOP_HLP_MIN_386();
4515 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4516
4517 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4518 * any way. AMD says it's "unused", whatever that means. We're
4519 * ignoring for now. */
4520 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4521 {
4522 /* register target */
4523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4524 IEM_MC_BEGIN(0, 0);
4525 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4526 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4527 } IEM_MC_ELSE() {
4528 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4529 } IEM_MC_ENDIF();
4530 IEM_MC_ADVANCE_RIP();
4531 IEM_MC_END();
4532 }
4533 else
4534 {
4535 /* memory target */
4536 IEM_MC_BEGIN(0, 1);
4537 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4538 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4540 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4541 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4542 } IEM_MC_ELSE() {
4543 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4544 } IEM_MC_ENDIF();
4545 IEM_MC_ADVANCE_RIP();
4546 IEM_MC_END();
4547 }
4548 return VINF_SUCCESS;
4549}
4550
4551
4552/** Opcode 0x0f 0x9b. */
4553FNIEMOP_DEF(iemOp_setnp_Eb)
4554{
4555 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
4556 IEMOP_HLP_MIN_386();
4557 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4558
4559 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4560 * any way. AMD says it's "unused", whatever that means. We're
4561 * ignoring for now. */
4562 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4563 {
4564 /* register target */
4565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4566 IEM_MC_BEGIN(0, 0);
4567 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4568 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4569 } IEM_MC_ELSE() {
4570 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4571 } IEM_MC_ENDIF();
4572 IEM_MC_ADVANCE_RIP();
4573 IEM_MC_END();
4574 }
4575 else
4576 {
4577 /* memory target */
4578 IEM_MC_BEGIN(0, 1);
4579 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4580 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4582 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4583 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4584 } IEM_MC_ELSE() {
4585 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4586 } IEM_MC_ENDIF();
4587 IEM_MC_ADVANCE_RIP();
4588 IEM_MC_END();
4589 }
4590 return VINF_SUCCESS;
4591}
4592
4593
4594/** Opcode 0x0f 0x9c. */
4595FNIEMOP_DEF(iemOp_setl_Eb)
4596{
4597 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
4598 IEMOP_HLP_MIN_386();
4599 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4600
4601 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4602 * any way. AMD says it's "unused", whatever that means. We're
4603 * ignoring for now. */
4604 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4605 {
4606 /* register target */
4607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4608 IEM_MC_BEGIN(0, 0);
4609 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4610 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4611 } IEM_MC_ELSE() {
4612 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4613 } IEM_MC_ENDIF();
4614 IEM_MC_ADVANCE_RIP();
4615 IEM_MC_END();
4616 }
4617 else
4618 {
4619 /* memory target */
4620 IEM_MC_BEGIN(0, 1);
4621 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4622 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4624 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4625 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4626 } IEM_MC_ELSE() {
4627 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4628 } IEM_MC_ENDIF();
4629 IEM_MC_ADVANCE_RIP();
4630 IEM_MC_END();
4631 }
4632 return VINF_SUCCESS;
4633}
4634
4635
4636/** Opcode 0x0f 0x9d. */
4637FNIEMOP_DEF(iemOp_setnl_Eb)
4638{
4639 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
4640 IEMOP_HLP_MIN_386();
4641 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4642
4643 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4644 * any way. AMD says it's "unused", whatever that means. We're
4645 * ignoring for now. */
4646 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4647 {
4648 /* register target */
4649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4650 IEM_MC_BEGIN(0, 0);
4651 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4652 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4653 } IEM_MC_ELSE() {
4654 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4655 } IEM_MC_ENDIF();
4656 IEM_MC_ADVANCE_RIP();
4657 IEM_MC_END();
4658 }
4659 else
4660 {
4661 /* memory target */
4662 IEM_MC_BEGIN(0, 1);
4663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4664 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4666 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4667 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4668 } IEM_MC_ELSE() {
4669 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4670 } IEM_MC_ENDIF();
4671 IEM_MC_ADVANCE_RIP();
4672 IEM_MC_END();
4673 }
4674 return VINF_SUCCESS;
4675}
4676
4677
4678/** Opcode 0x0f 0x9e. */
4679FNIEMOP_DEF(iemOp_setle_Eb)
4680{
4681 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
4682 IEMOP_HLP_MIN_386();
4683 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4684
4685 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4686 * any way. AMD says it's "unused", whatever that means. We're
4687 * ignoring for now. */
4688 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4689 {
4690 /* register target */
4691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4692 IEM_MC_BEGIN(0, 0);
4693 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4694 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4695 } IEM_MC_ELSE() {
4696 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4697 } IEM_MC_ENDIF();
4698 IEM_MC_ADVANCE_RIP();
4699 IEM_MC_END();
4700 }
4701 else
4702 {
4703 /* memory target */
4704 IEM_MC_BEGIN(0, 1);
4705 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4706 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4708 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4709 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4710 } IEM_MC_ELSE() {
4711 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4712 } IEM_MC_ENDIF();
4713 IEM_MC_ADVANCE_RIP();
4714 IEM_MC_END();
4715 }
4716 return VINF_SUCCESS;
4717}
4718
4719
4720/** Opcode 0x0f 0x9f. */
4721FNIEMOP_DEF(iemOp_setnle_Eb)
4722{
4723 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
4724 IEMOP_HLP_MIN_386();
4725 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4726
4727 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4728 * any way. AMD says it's "unused", whatever that means. We're
4729 * ignoring for now. */
4730 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4731 {
4732 /* register target */
4733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4734 IEM_MC_BEGIN(0, 0);
4735 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4736 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4737 } IEM_MC_ELSE() {
4738 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4739 } IEM_MC_ENDIF();
4740 IEM_MC_ADVANCE_RIP();
4741 IEM_MC_END();
4742 }
4743 else
4744 {
4745 /* memory target */
4746 IEM_MC_BEGIN(0, 1);
4747 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4750 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4751 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4752 } IEM_MC_ELSE() {
4753 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4754 } IEM_MC_ENDIF();
4755 IEM_MC_ADVANCE_RIP();
4756 IEM_MC_END();
4757 }
4758 return VINF_SUCCESS;
4759}
4760
4761
4762/**
4763 * Common 'push segment-register' helper.
4764 */
4765FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
4766{
4767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4768 if (iReg < X86_SREG_FS)
4769 IEMOP_HLP_NO_64BIT();
4770 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4771
4772 switch (pVCpu->iem.s.enmEffOpSize)
4773 {
4774 case IEMMODE_16BIT:
4775 IEM_MC_BEGIN(0, 1);
4776 IEM_MC_LOCAL(uint16_t, u16Value);
4777 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
4778 IEM_MC_PUSH_U16(u16Value);
4779 IEM_MC_ADVANCE_RIP();
4780 IEM_MC_END();
4781 break;
4782
4783 case IEMMODE_32BIT:
4784 IEM_MC_BEGIN(0, 1);
4785 IEM_MC_LOCAL(uint32_t, u32Value);
4786 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
4787 IEM_MC_PUSH_U32_SREG(u32Value);
4788 IEM_MC_ADVANCE_RIP();
4789 IEM_MC_END();
4790 break;
4791
4792 case IEMMODE_64BIT:
4793 IEM_MC_BEGIN(0, 1);
4794 IEM_MC_LOCAL(uint64_t, u64Value);
4795 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
4796 IEM_MC_PUSH_U64(u64Value);
4797 IEM_MC_ADVANCE_RIP();
4798 IEM_MC_END();
4799 break;
4800 }
4801
4802 return VINF_SUCCESS;
4803}
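/**
 * Note! Illustrative sketch only, not build code: the 32-bit case above
 *       uses IEM_MC_PUSH_U32_SREG rather than a plain 32-bit push because
 *       real CPUs may perform a 16-bit write for 'push sreg' with a
 *       32-bit operand size, leaving the upper half of the stack slot
 *       untouched. Roughly, under that assumption (helper name made up):
 */
#if 0
static void iemSketchPushSReg32(uint16_t uSel, uint32_t *pu32StackSlot)
{
    *(uint16_t *)pu32StackSlot = uSel; /* low word only; high word left unmodified */
}
#endif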
4804
4805
4806/** Opcode 0x0f 0xa0. */
4807FNIEMOP_DEF(iemOp_push_fs)
4808{
4809 IEMOP_MNEMONIC(push_fs, "push fs");
4810 IEMOP_HLP_MIN_386();
4811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4812 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
4813}
4814
4815
4816/** Opcode 0x0f 0xa1. */
4817FNIEMOP_DEF(iemOp_pop_fs)
4818{
4819 IEMOP_MNEMONIC(pop_fs, "pop fs");
4820 IEMOP_HLP_MIN_386();
4821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4822 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
4823}
4824
4825
4826/** Opcode 0x0f 0xa2. */
4827FNIEMOP_DEF(iemOp_cpuid)
4828{
4829 IEMOP_MNEMONIC(cpuid, "cpuid");
4830 IEMOP_HLP_MIN_486(); /* not all 486es. */
4831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4832 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
4833}
4834
4835
4836/**
4837 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4838 * iemOp_bts_Ev_Gv.
4839 */
4840FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4841{
4842 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4843 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4844
4845 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4846 {
4847 /* register destination. */
4848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4849 switch (pVCpu->iem.s.enmEffOpSize)
4850 {
4851 case IEMMODE_16BIT:
4852 IEM_MC_BEGIN(3, 0);
4853 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4854 IEM_MC_ARG(uint16_t, u16Src, 1);
4855 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4856
4857 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4858 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4859 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4860 IEM_MC_REF_EFLAGS(pEFlags);
4861 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4862
4863 IEM_MC_ADVANCE_RIP();
4864 IEM_MC_END();
4865 return VINF_SUCCESS;
4866
4867 case IEMMODE_32BIT:
4868 IEM_MC_BEGIN(3, 0);
4869 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4870 IEM_MC_ARG(uint32_t, u32Src, 1);
4871 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4872
4873 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4874 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4875 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4876 IEM_MC_REF_EFLAGS(pEFlags);
4877 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4878
4879 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4880 IEM_MC_ADVANCE_RIP();
4881 IEM_MC_END();
4882 return VINF_SUCCESS;
4883
4884 case IEMMODE_64BIT:
4885 IEM_MC_BEGIN(3, 0);
4886 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4887 IEM_MC_ARG(uint64_t, u64Src, 1);
4888 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4889
4890 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4891 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4892 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4893 IEM_MC_REF_EFLAGS(pEFlags);
4894 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4895
4896 IEM_MC_ADVANCE_RIP();
4897 IEM_MC_END();
4898 return VINF_SUCCESS;
4899
4900 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4901 }
4902 }
4903 else
4904 {
4905 /* memory destination. */
4906
4907 uint32_t fAccess;
4908 if (pImpl->pfnLockedU16)
4909 fAccess = IEM_ACCESS_DATA_RW;
4910 else /* BT */
4911 fAccess = IEM_ACCESS_DATA_R;
4912
4913 /** @todo test negative bit offsets! */
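        /* Note! Illustrative sketch only: the register operand holds a *signed*
         *       bit offset which is split into a byte displacement added to the
         *       effective address and a bit index within the operand, e.g. for
         *       the 16-bit case right below:
         *           i16AddrAdj = (i16BitOffset >> 4) << 1;  - signed word index * 2 bytes
         *           iBit       = i16BitOffset & 0x0f;       - bit tested within that word
         */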
4914 switch (pVCpu->iem.s.enmEffOpSize)
4915 {
4916 case IEMMODE_16BIT:
4917 IEM_MC_BEGIN(3, 2);
4918 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4919 IEM_MC_ARG(uint16_t, u16Src, 1);
4920 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4921 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4922 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4923
4924 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4925 if (pImpl->pfnLockedU16)
4926 IEMOP_HLP_DONE_DECODING();
4927 else
4928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4929 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4930 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4931 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4932 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4933 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
4934 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4935 IEM_MC_FETCH_EFLAGS(EFlags);
4936
4937 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4938 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
4939 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4940 else
4941 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4942 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
4943
4944 IEM_MC_COMMIT_EFLAGS(EFlags);
4945 IEM_MC_ADVANCE_RIP();
4946 IEM_MC_END();
4947 return VINF_SUCCESS;
4948
4949 case IEMMODE_32BIT:
4950 IEM_MC_BEGIN(3, 2);
4951 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4952 IEM_MC_ARG(uint32_t, u32Src, 1);
4953 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4954 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4955 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4956
4957 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4958 if (pImpl->pfnLockedU16)
4959 IEMOP_HLP_DONE_DECODING();
4960 else
4961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4962 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4963 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4964 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4965 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4966 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4967 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4968 IEM_MC_FETCH_EFLAGS(EFlags);
4969
4970 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4971 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
4972 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4973 else
4974 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4975 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
4976
4977 IEM_MC_COMMIT_EFLAGS(EFlags);
4978 IEM_MC_ADVANCE_RIP();
4979 IEM_MC_END();
4980 return VINF_SUCCESS;
4981
4982 case IEMMODE_64BIT:
4983 IEM_MC_BEGIN(3, 2);
4984 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4985 IEM_MC_ARG(uint64_t, u64Src, 1);
4986 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4987 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4988 IEM_MC_LOCAL(int64_t, i64AddrAdj);
4989
4990 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4991 if (pImpl->pfnLockedU16)
4992 IEMOP_HLP_DONE_DECODING();
4993 else
4994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4995 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4996 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
4997 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
4998 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
4999 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5000 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5001 IEM_MC_FETCH_EFLAGS(EFlags);
5002
5003 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5004 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5005 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5006 else
5007 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5008 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5009
5010 IEM_MC_COMMIT_EFLAGS(EFlags);
5011 IEM_MC_ADVANCE_RIP();
5012 IEM_MC_END();
5013 return VINF_SUCCESS;
5014
5015 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5016 }
5017 }
5018}
5019
5020
5021/** Opcode 0x0f 0xa3. */
5022FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5023{
5024 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5025 IEMOP_HLP_MIN_386();
5026 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5027}
5028
5029
5030/**
5031 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5032 */
5033FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5034{
5035 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5036 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5037
5038 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5039 {
5040 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5042
5043 switch (pVCpu->iem.s.enmEffOpSize)
5044 {
5045 case IEMMODE_16BIT:
5046 IEM_MC_BEGIN(4, 0);
5047 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5048 IEM_MC_ARG(uint16_t, u16Src, 1);
5049 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5050 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5051
5052 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5053 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5054 IEM_MC_REF_EFLAGS(pEFlags);
5055 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5056
5057 IEM_MC_ADVANCE_RIP();
5058 IEM_MC_END();
5059 return VINF_SUCCESS;
5060
5061 case IEMMODE_32BIT:
5062 IEM_MC_BEGIN(4, 0);
5063 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5064 IEM_MC_ARG(uint32_t, u32Src, 1);
5065 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5066 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5067
5068 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5069 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5070 IEM_MC_REF_EFLAGS(pEFlags);
5071 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5072
5073 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5074 IEM_MC_ADVANCE_RIP();
5075 IEM_MC_END();
5076 return VINF_SUCCESS;
5077
5078 case IEMMODE_64BIT:
5079 IEM_MC_BEGIN(4, 0);
5080 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5081 IEM_MC_ARG(uint64_t, u64Src, 1);
5082 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5083 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5084
5085 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5086 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5087 IEM_MC_REF_EFLAGS(pEFlags);
5088 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5089
5090 IEM_MC_ADVANCE_RIP();
5091 IEM_MC_END();
5092 return VINF_SUCCESS;
5093
5094 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5095 }
5096 }
5097 else
5098 {
5099 switch (pVCpu->iem.s.enmEffOpSize)
5100 {
5101 case IEMMODE_16BIT:
5102 IEM_MC_BEGIN(4, 2);
5103 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5104 IEM_MC_ARG(uint16_t, u16Src, 1);
5105 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5106 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5107 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5108
5109 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5110 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5111 IEM_MC_ASSIGN(cShiftArg, cShift);
5112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5113 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5114 IEM_MC_FETCH_EFLAGS(EFlags);
5115 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5116 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5117
5118 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5119 IEM_MC_COMMIT_EFLAGS(EFlags);
5120 IEM_MC_ADVANCE_RIP();
5121 IEM_MC_END();
5122 return VINF_SUCCESS;
5123
5124 case IEMMODE_32BIT:
5125 IEM_MC_BEGIN(4, 2);
5126 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5127 IEM_MC_ARG(uint32_t, u32Src, 1);
5128 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5129 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5130 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5131
5132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5133 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5134 IEM_MC_ASSIGN(cShiftArg, cShift);
5135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5136 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5137 IEM_MC_FETCH_EFLAGS(EFlags);
5138 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5139 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5140
5141 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5142 IEM_MC_COMMIT_EFLAGS(EFlags);
5143 IEM_MC_ADVANCE_RIP();
5144 IEM_MC_END();
5145 return VINF_SUCCESS;
5146
5147 case IEMMODE_64BIT:
5148 IEM_MC_BEGIN(4, 2);
5149 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5150 IEM_MC_ARG(uint64_t, u64Src, 1);
5151 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5152 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5153 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5154
5155 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5156 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5157 IEM_MC_ASSIGN(cShiftArg, cShift);
5158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5159 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5160 IEM_MC_FETCH_EFLAGS(EFlags);
5161 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5162 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5163
5164 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5165 IEM_MC_COMMIT_EFLAGS(EFlags);
5166 IEM_MC_ADVANCE_RIP();
5167 IEM_MC_END();
5168 return VINF_SUCCESS;
5169
5170 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5171 }
5172 }
5173}
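/**
 * Note! Illustrative sketch only, not build code: the double shift the
 *       pfnNormalUxx workers implement, shown for 32-bit shld with a
 *       masked, non-zero count and EFLAGS left out. Helper name made up
 *       for the example.
 */
#if 0
static uint32_t iemSketchShld32(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
{
    cShift &= 31;                    /* the CPU masks the count to 0..31 */
    if (!cShift)
        return uDst;                 /* count 0: destination unchanged */
    return (uDst << cShift)          /* high part: destination shifted left */
         | (uSrc >> (32 - cShift));  /* low part: filled in from the source */
}
#endif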
5174
5175
5176/**
5177 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5178 */
5179FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5180{
5181 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5182 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5183
5184 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5185 {
5186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5187
5188 switch (pVCpu->iem.s.enmEffOpSize)
5189 {
5190 case IEMMODE_16BIT:
5191 IEM_MC_BEGIN(4, 0);
5192 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5193 IEM_MC_ARG(uint16_t, u16Src, 1);
5194 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5195 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5196
5197 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5198 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5199 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5200 IEM_MC_REF_EFLAGS(pEFlags);
5201 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5202
5203 IEM_MC_ADVANCE_RIP();
5204 IEM_MC_END();
5205 return VINF_SUCCESS;
5206
5207 case IEMMODE_32BIT:
5208 IEM_MC_BEGIN(4, 0);
5209 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5210 IEM_MC_ARG(uint32_t, u32Src, 1);
5211 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5212 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5213
5214 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5215 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5216 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5217 IEM_MC_REF_EFLAGS(pEFlags);
5218 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5219
5220 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5221 IEM_MC_ADVANCE_RIP();
5222 IEM_MC_END();
5223 return VINF_SUCCESS;
5224
5225 case IEMMODE_64BIT:
5226 IEM_MC_BEGIN(4, 0);
5227 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5228 IEM_MC_ARG(uint64_t, u64Src, 1);
5229 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5230 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5231
5232 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5233 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5234 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5235 IEM_MC_REF_EFLAGS(pEFlags);
5236 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5237
5238 IEM_MC_ADVANCE_RIP();
5239 IEM_MC_END();
5240 return VINF_SUCCESS;
5241
5242 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5243 }
5244 }
5245 else
5246 {
5247 switch (pVCpu->iem.s.enmEffOpSize)
5248 {
5249 case IEMMODE_16BIT:
5250 IEM_MC_BEGIN(4, 2);
5251 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5252 IEM_MC_ARG(uint16_t, u16Src, 1);
5253 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5254 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5255 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5256
5257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5259 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5260 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5261 IEM_MC_FETCH_EFLAGS(EFlags);
5262 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5263 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5264
5265 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5266 IEM_MC_COMMIT_EFLAGS(EFlags);
5267 IEM_MC_ADVANCE_RIP();
5268 IEM_MC_END();
5269 return VINF_SUCCESS;
5270
5271 case IEMMODE_32BIT:
5272 IEM_MC_BEGIN(4, 2);
5273 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5274 IEM_MC_ARG(uint32_t, u32Src, 1);
5275 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5276 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5277 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5278
5279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5281 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5282 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5283 IEM_MC_FETCH_EFLAGS(EFlags);
5284 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5285 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5286
5287 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5288 IEM_MC_COMMIT_EFLAGS(EFlags);
5289 IEM_MC_ADVANCE_RIP();
5290 IEM_MC_END();
5291 return VINF_SUCCESS;
5292
5293 case IEMMODE_64BIT:
5294 IEM_MC_BEGIN(4, 2);
5295 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5296 IEM_MC_ARG(uint64_t, u64Src, 1);
5297 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5298 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5299 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5300
5301 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5303 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5304 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5305 IEM_MC_FETCH_EFLAGS(EFlags);
5306 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5307 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5308
5309 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5310 IEM_MC_COMMIT_EFLAGS(EFlags);
5311 IEM_MC_ADVANCE_RIP();
5312 IEM_MC_END();
5313 return VINF_SUCCESS;
5314
5315 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5316 }
5317 }
5318}
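
/*
 * Illustrative sketch (not emulator code): a plain C model of the 32-bit
 * SHLD operation that the worker above dispatches to via pImpl->pfnNormalU32.
 * The count is masked to 0..31 as the hardware does; the EFLAGS updates are
 * left out for brevity.  sketchShldU32 is a hypothetical name, not an IEM
 * symbol.
 */
#if 0
static uint32_t sketchShldU32(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
{
    cShift &= 31;                               /* the CPU masks the shift count */
    if (!cShift)
        return uDst;                            /* a zero count leaves the operand untouched */
    /* The high bits come from uDst shifted left; the vacated low bits are
       filled in from the top of uSrc. */
    return (uDst << cShift) | (uSrc >> (32 - cShift));
}
#endif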
5319
5320
5321
5322/** Opcode 0x0f 0xa4. */
5323FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5324{
5325 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5326 IEMOP_HLP_MIN_386();
5327 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5328}
5329
5330
5331/** Opcode 0x0f 0xa5. */
5332FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5333{
5334 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5335 IEMOP_HLP_MIN_386();
5336 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5337}
5338
5339
5340/** Opcode 0x0f 0xa8. */
5341FNIEMOP_DEF(iemOp_push_gs)
5342{
5343 IEMOP_MNEMONIC(push_gs, "push gs");
5344 IEMOP_HLP_MIN_386();
5345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5346 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5347}
5348
5349
5350/** Opcode 0x0f 0xa9. */
5351FNIEMOP_DEF(iemOp_pop_gs)
5352{
5353 IEMOP_MNEMONIC(pop_gs, "pop gs");
5354 IEMOP_HLP_MIN_386();
5355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5356 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5357}
5358
5359
5360/** Opcode 0x0f 0xaa. */
5361FNIEMOP_STUB(iemOp_rsm);
5362//IEMOP_HLP_MIN_386();
5363
5364
5365/** Opcode 0x0f 0xab. */
5366FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5367{
5368 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5369 IEMOP_HLP_MIN_386();
5370 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5371}
5372
5373
5374/** Opcode 0x0f 0xac. */
5375FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5376{
5377 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5378 IEMOP_HLP_MIN_386();
5379 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5380}
5381
5382
5383/** Opcode 0x0f 0xad. */
5384FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5385{
5386 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5387 IEMOP_HLP_MIN_386();
5388 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5389}
5390
5391
5392/** Opcode 0x0f 0xae mem/0. */
5393FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5394{
5395 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5396 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5397 return IEMOP_RAISE_INVALID_OPCODE();
5398
5399 IEM_MC_BEGIN(3, 1);
5400 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5401 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5402 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5403 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5405 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5406 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5407 IEM_MC_END();
5408 return VINF_SUCCESS;
5409}
5410
5411
5412/** Opcode 0x0f 0xae mem/1. */
5413FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5414{
5415 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5416 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5417 return IEMOP_RAISE_INVALID_OPCODE();
5418
5419 IEM_MC_BEGIN(3, 1);
5420 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5421 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5422 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5423 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5425 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5426 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5427 IEM_MC_END();
5428 return VINF_SUCCESS;
5429}
5430
5431
5432/** Opcode 0x0f 0xae mem/2. */
5433FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);
5434
5435/** Opcode 0x0f 0xae mem/3. */
5436FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);
5437
5438/** Opcode 0x0f 0xae mem/4. */
5439FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);
5440
5441/** Opcode 0x0f 0xae mem/5. */
5442FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);
5443
5444/** Opcode 0x0f 0xae mem/6. */
5445FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
5446
5447/** Opcode 0x0f 0xae mem/7. */
5448FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5449
5450
5451/** Opcode 0x0f 0xae 11b/5. */
5452FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
5453{
5454 RT_NOREF_PV(bRm);
5455 IEMOP_MNEMONIC(lfence, "lfence");
5456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5457 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5458 return IEMOP_RAISE_INVALID_OPCODE();
5459
5460 IEM_MC_BEGIN(0, 0);
5461 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5462 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
5463 else
5464 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5465 IEM_MC_ADVANCE_RIP();
5466 IEM_MC_END();
5467 return VINF_SUCCESS;
5468}
5469
5470
5471/** Opcode 0x0f 0xae 11b/6. */
5472FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
5473{
5474 RT_NOREF_PV(bRm);
5475 IEMOP_MNEMONIC(mfence, "mfence");
5476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5477 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5478 return IEMOP_RAISE_INVALID_OPCODE();
5479
5480 IEM_MC_BEGIN(0, 0);
5481 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5482 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
5483 else
5484 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5485 IEM_MC_ADVANCE_RIP();
5486 IEM_MC_END();
5487 return VINF_SUCCESS;
5488}
5489
5490
5491/** Opcode 0x0f 0xae 11b/7. */
5492FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
5493{
5494 RT_NOREF_PV(bRm);
5495 IEMOP_MNEMONIC(sfence, "sfence");
5496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5497 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5498 return IEMOP_RAISE_INVALID_OPCODE();
5499
5500 IEM_MC_BEGIN(0, 0);
5501 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5502 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
5503 else
5504 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5505 IEM_MC_ADVANCE_RIP();
5506 IEM_MC_END();
5507 return VINF_SUCCESS;
5508}
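
/*
 * Illustrative sketch (not emulator code): all three fence workers above
 * fall back to iemAImpl_alt_mem_fence when the host CPU lacks SSE2.  A
 * classic way to get a full memory fence on such hosts is a LOCKed
 * read-modify-write, sketched here with GCC-style inline assembly.
 * sketchAltMemFence is a hypothetical name, not the actual fallback.
 */
#if 0
static void sketchAltMemFence(void)
{
    int32_t          iAdd   = 0;
    volatile int32_t iDummy = 0;
    /* Any LOCKed instruction orders all earlier loads and stores against
       all later ones, which is a superset of what l/m/sfence require. */
    __asm__ __volatile__("lock; xaddl %0, %1"
                         : "+r" (iAdd), "+m" (iDummy)
                         :
                         : "memory");
}
#endif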
5509
5510
5511/** Opcode 0xf3 0x0f 0xae 11b/0. */
5512FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
5513
5514/** Opcode 0xf3 0x0f 0xae 11b/1. */
5515FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
5516
5517/** Opcode 0xf3 0x0f 0xae 11b/2. */
5518FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
5519
5520/** Opcode 0xf3 0x0f 0xae 11b/3. */
5521FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5522
5523
5524/** Opcode 0x0f 0xae. */
5525FNIEMOP_DEF(iemOp_Grp15)
5526{
5527 IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor needed, but useful for debugging 286 code. */
5528 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5529 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
5530 {
5531 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5532 {
5533 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
5534 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
5535 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
5536 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
5537 case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
5538 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
5539 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
5540 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
5541 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5542 }
5543 }
5544 else
5545 {
5546 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
5547 {
5548 case 0:
5549 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5550 {
5551 case 0: return IEMOP_RAISE_INVALID_OPCODE();
5552 case 1: return IEMOP_RAISE_INVALID_OPCODE();
5553 case 2: return IEMOP_RAISE_INVALID_OPCODE();
5554 case 3: return IEMOP_RAISE_INVALID_OPCODE();
5555 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5556 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
5557 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
5558 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
5559 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5560 }
5561 break;
5562
5563 case IEM_OP_PRF_REPZ:
5564 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5565 {
5566 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
5567 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
5568 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
5569 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
5570 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5571 case 5: return IEMOP_RAISE_INVALID_OPCODE();
5572 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5573 case 7: return IEMOP_RAISE_INVALID_OPCODE();
5574 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5575 }
5576 break;
5577
5578 default:
5579 return IEMOP_RAISE_INVALID_OPCODE();
5580 }
5581 }
5582}
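
/*
 * Illustrative sketch (not decoder code): how the ModR/M byte consumed by
 * the group-15 dispatcher above splits into its three fields.  The
 * X86_MODRM_* constants used above encode exactly these shifts and masks;
 * sketchSplitModRM is a hypothetical helper for illustration only.
 */
#if 0
static void sketchSplitModRM(uint8_t bRm, uint8_t *pbMod, uint8_t *pbReg, uint8_t *pbRm)
{
    *pbMod = bRm >> 6;                  /* 11b selects the register forms (fences etc.) */
    *pbReg = (bRm >> 3) & 7;            /* opcode extension: selects the /0../7 group member */
    *pbRm  = bRm & 7;                   /* register or memory operand (REX.B extends it) */
}
#endif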
5583
5584
5585/** Opcode 0x0f 0xaf. */
5586FNIEMOP_DEF(iemOp_imul_Gv_Ev)
5587{
5588 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
5589 IEMOP_HLP_MIN_386();
5590 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5591 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
5592}
5593
5594
5595/** Opcode 0x0f 0xb0. */
5596FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
5597{
5598 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
5599 IEMOP_HLP_MIN_486();
5600 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5601
5602 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5603 {
5604 IEMOP_HLP_DONE_DECODING();
5605 IEM_MC_BEGIN(4, 0);
5606 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5607 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5608 IEM_MC_ARG(uint8_t, u8Src, 2);
5609 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5610
5611 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5612 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5613 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
5614 IEM_MC_REF_EFLAGS(pEFlags);
5615 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5616 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5617 else
5618 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5619
5620 IEM_MC_ADVANCE_RIP();
5621 IEM_MC_END();
5622 }
5623 else
5624 {
5625 IEM_MC_BEGIN(4, 3);
5626 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5627 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5628 IEM_MC_ARG(uint8_t, u8Src, 2);
5629 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5630 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5631 IEM_MC_LOCAL(uint8_t, u8Al);
5632
5633 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5634 IEMOP_HLP_DONE_DECODING();
5635 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5636 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5637 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
5638 IEM_MC_FETCH_EFLAGS(EFlags);
5639 IEM_MC_REF_LOCAL(pu8Al, u8Al);
5640 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5641 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5642 else
5643 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5644
5645 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
5646 IEM_MC_COMMIT_EFLAGS(EFlags);
5647 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
5648 IEM_MC_ADVANCE_RIP();
5649 IEM_MC_END();
5650 }
5651 return VINF_SUCCESS;
5652}
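
/*
 * Illustrative sketch (not emulator code): the architectural effect of
 * CMPXCHG r/m8, r8 as wired up above, showing only the ZF part of the
 * EFLAGS update (the compare also sets CF, OF, SF, AF and PF like CMP).
 * sketchCmpXchgU8 is a hypothetical name; the real work is done by
 * iemAImpl_cmpxchg_u8(_locked) in assembly.
 */
#if 0
static void sketchCmpXchgU8(uint8_t *puDst, uint8_t *puAl, uint8_t uSrc, uint32_t *pfEFlags)
{
    if (*puDst == *puAl)
    {
        *puDst     = uSrc;              /* equal: the source operand is stored */
        *pfEFlags |= X86_EFL_ZF;
    }
    else
    {
        *puAl      = *puDst;            /* not equal: AL is loaded from the destination */
        *pfEFlags &= ~X86_EFL_ZF;
    }
}
#endif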
5653
5654/** Opcode 0x0f 0xb1. */
5655FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
5656{
5657 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
5658 IEMOP_HLP_MIN_486();
5659 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5660
5661 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5662 {
5663 IEMOP_HLP_DONE_DECODING();
5664 switch (pVCpu->iem.s.enmEffOpSize)
5665 {
5666 case IEMMODE_16BIT:
5667 IEM_MC_BEGIN(4, 0);
5668 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5669 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5670 IEM_MC_ARG(uint16_t, u16Src, 2);
5671 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5672
5673 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5674 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5675 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
5676 IEM_MC_REF_EFLAGS(pEFlags);
5677 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5678 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5679 else
5680 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5681
5682 IEM_MC_ADVANCE_RIP();
5683 IEM_MC_END();
5684 return VINF_SUCCESS;
5685
5686 case IEMMODE_32BIT:
5687 IEM_MC_BEGIN(4, 0);
5688 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5689 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5690 IEM_MC_ARG(uint32_t, u32Src, 2);
5691 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5692
5693 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5694 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5695 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
5696 IEM_MC_REF_EFLAGS(pEFlags);
5697 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5698 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5699 else
5700 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5701
5702 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
5703 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5704 IEM_MC_ADVANCE_RIP();
5705 IEM_MC_END();
5706 return VINF_SUCCESS;
5707
5708 case IEMMODE_64BIT:
5709 IEM_MC_BEGIN(4, 0);
5710 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5711 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5712#ifdef RT_ARCH_X86
5713 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5714#else
5715 IEM_MC_ARG(uint64_t, u64Src, 2);
5716#endif
5717 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5718
5719 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5720 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
5721 IEM_MC_REF_EFLAGS(pEFlags);
5722#ifdef RT_ARCH_X86
5723 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5724 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5725 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5726 else
5727 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5728#else
5729 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5730 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5731 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5732 else
5733 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5734#endif
5735
5736 IEM_MC_ADVANCE_RIP();
5737 IEM_MC_END();
5738 return VINF_SUCCESS;
5739
5740 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5741 }
5742 }
5743 else
5744 {
5745 switch (pVCpu->iem.s.enmEffOpSize)
5746 {
5747 case IEMMODE_16BIT:
5748 IEM_MC_BEGIN(4, 3);
5749 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5750 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5751 IEM_MC_ARG(uint16_t, u16Src, 2);
5752 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5753 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5754 IEM_MC_LOCAL(uint16_t, u16Ax);
5755
5756 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5757 IEMOP_HLP_DONE_DECODING();
5758 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5759 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5760 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
5761 IEM_MC_FETCH_EFLAGS(EFlags);
5762 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
5763 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5764 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5765 else
5766 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5767
5768 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5769 IEM_MC_COMMIT_EFLAGS(EFlags);
5770 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
5771 IEM_MC_ADVANCE_RIP();
5772 IEM_MC_END();
5773 return VINF_SUCCESS;
5774
5775 case IEMMODE_32BIT:
5776 IEM_MC_BEGIN(4, 3);
5777 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5778 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5779 IEM_MC_ARG(uint32_t, u32Src, 2);
5780 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5781 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5782 IEM_MC_LOCAL(uint32_t, u32Eax);
5783
5784 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5785 IEMOP_HLP_DONE_DECODING();
5786 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5787 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5788 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
5789 IEM_MC_FETCH_EFLAGS(EFlags);
5790 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
5791 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5792 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5793 else
5794 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5795
5796 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5797 IEM_MC_COMMIT_EFLAGS(EFlags);
5798 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
5799 IEM_MC_ADVANCE_RIP();
5800 IEM_MC_END();
5801 return VINF_SUCCESS;
5802
5803 case IEMMODE_64BIT:
5804 IEM_MC_BEGIN(4, 3);
5805 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5806 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5807#ifdef RT_ARCH_X86
5808 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5809#else
5810 IEM_MC_ARG(uint64_t, u64Src, 2);
5811#endif
5812 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5813 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5814 IEM_MC_LOCAL(uint64_t, u64Rax);
5815
5816 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5817 IEMOP_HLP_DONE_DECODING();
5818 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5819 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
5820 IEM_MC_FETCH_EFLAGS(EFlags);
5821 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
5822#ifdef RT_ARCH_X86
5823 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5824 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5825 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5826 else
5827 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5828#else
5829 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5830 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5831 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5832 else
5833 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5834#endif
5835
5836 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5837 IEM_MC_COMMIT_EFLAGS(EFlags);
5838 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
5839 IEM_MC_ADVANCE_RIP();
5840 IEM_MC_END();
5841 return VINF_SUCCESS;
5842
5843 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5844 }
5845 }
5846}
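
/*
 * Illustrative usage sketch (not emulator code): the canonical lock-free
 * update loop guests build from LOCK CMPXCHG, with the GCC/clang builtin
 * standing in for the instruction the handler above emulates.
 * sketchAtomicIncU32 is a hypothetical example name.
 */
#if 0
static uint32_t sketchAtomicIncU32(uint32_t volatile *pu32)
{
    uint32_t uOld, uNew;
    do
    {
        uOld = *pu32;                   /* eax = expected value */
        uNew = uOld + 1;                /* compute the replacement */
    } while (!__sync_bool_compare_and_swap(pu32, uOld, uNew)); /* lock cmpxchg */
    return uNew;
}
#endif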
5847
5848
5849FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
5850{
5851 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
5852 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
5853
5854 switch (pVCpu->iem.s.enmEffOpSize)
5855 {
5856 case IEMMODE_16BIT:
5857 IEM_MC_BEGIN(5, 1);
5858 IEM_MC_ARG(uint16_t, uSel, 0);
5859 IEM_MC_ARG(uint16_t, offSeg, 1);
5860 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5861 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5862 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5863 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5864 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5866 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5867 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
5868 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5869 IEM_MC_END();
5870 return VINF_SUCCESS;
5871
5872 case IEMMODE_32BIT:
5873 IEM_MC_BEGIN(5, 1);
5874 IEM_MC_ARG(uint16_t, uSel, 0);
5875 IEM_MC_ARG(uint32_t, offSeg, 1);
5876 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5877 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5878 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5879 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5880 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5882 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5883 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
5884 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5885 IEM_MC_END();
5886 return VINF_SUCCESS;
5887
5888 case IEMMODE_64BIT:
5889 IEM_MC_BEGIN(5, 1);
5890 IEM_MC_ARG(uint16_t, uSel, 0);
5891 IEM_MC_ARG(uint64_t, offSeg, 1);
5892 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5893 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5894 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5895 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5896 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5898 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manuals claims it only loads a 32-bit greg. */
5899 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5900 else
5901 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5902 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
5903 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5904 IEM_MC_END();
5905 return VINF_SUCCESS;
5906
5907 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5908 }
5909}
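
/*
 * Illustrative sketch (not emulator code): the memory layout of the far
 * pointer the common worker above loads for LSS/LFS/LGS.  The offset comes
 * first and the 16-bit selector follows at +2/+4/+8 depending on the
 * operand size, which is what the IEM_MC_FETCH_MEM_U16_DISP calls express.
 * sketchFarPtr32 is a hypothetical type for illustration only.
 */
#if 0
#pragma pack(1)
typedef struct sketchFarPtr32
{
    uint32_t off;                       /* goes into the general register */
    uint16_t sel;                       /* goes into the segment register */
} sketchFarPtr32;
#pragma pack()
#endif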
5910
5911
5912/** Opcode 0x0f 0xb2. */
5913FNIEMOP_DEF(iemOp_lss_Gv_Mp)
5914{
5915 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
5916 IEMOP_HLP_MIN_386();
5917 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5918 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5919 return IEMOP_RAISE_INVALID_OPCODE();
5920 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
5921}
5922
5923
5924/** Opcode 0x0f 0xb3. */
5925FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5926{
5927 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
5928 IEMOP_HLP_MIN_386();
5929 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5930}
5931
5932
5933/** Opcode 0x0f 0xb4. */
5934FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
5935{
5936 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
5937 IEMOP_HLP_MIN_386();
5938 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5939 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5940 return IEMOP_RAISE_INVALID_OPCODE();
5941 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
5942}
5943
5944
5945/** Opcode 0x0f 0xb5. */
5946FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
5947{
5948 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
5949 IEMOP_HLP_MIN_386();
5950 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5951 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5952 return IEMOP_RAISE_INVALID_OPCODE();
5953 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
5954}
5955
5956
5957/** Opcode 0x0f 0xb6. */
5958FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
5959{
5960 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
5961 IEMOP_HLP_MIN_386();
5962
5963 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5964
5965 /*
5966 * If rm is denoting a register, no more instruction bytes.
5967 */
5968 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5969 {
5970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5971 switch (pVCpu->iem.s.enmEffOpSize)
5972 {
5973 case IEMMODE_16BIT:
5974 IEM_MC_BEGIN(0, 1);
5975 IEM_MC_LOCAL(uint16_t, u16Value);
5976 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5977 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
5978 IEM_MC_ADVANCE_RIP();
5979 IEM_MC_END();
5980 return VINF_SUCCESS;
5981
5982 case IEMMODE_32BIT:
5983 IEM_MC_BEGIN(0, 1);
5984 IEM_MC_LOCAL(uint32_t, u32Value);
5985 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5986 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
5987 IEM_MC_ADVANCE_RIP();
5988 IEM_MC_END();
5989 return VINF_SUCCESS;
5990
5991 case IEMMODE_64BIT:
5992 IEM_MC_BEGIN(0, 1);
5993 IEM_MC_LOCAL(uint64_t, u64Value);
5994 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5995 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
5996 IEM_MC_ADVANCE_RIP();
5997 IEM_MC_END();
5998 return VINF_SUCCESS;
5999
6000 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6001 }
6002 }
6003 else
6004 {
6005 /*
6006 * We're loading a register from memory.
6007 */
6008 switch (pVCpu->iem.s.enmEffOpSize)
6009 {
6010 case IEMMODE_16BIT:
6011 IEM_MC_BEGIN(0, 2);
6012 IEM_MC_LOCAL(uint16_t, u16Value);
6013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6016 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6017 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6018 IEM_MC_ADVANCE_RIP();
6019 IEM_MC_END();
6020 return VINF_SUCCESS;
6021
6022 case IEMMODE_32BIT:
6023 IEM_MC_BEGIN(0, 2);
6024 IEM_MC_LOCAL(uint32_t, u32Value);
6025 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6026 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6028 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6029 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6030 IEM_MC_ADVANCE_RIP();
6031 IEM_MC_END();
6032 return VINF_SUCCESS;
6033
6034 case IEMMODE_64BIT:
6035 IEM_MC_BEGIN(0, 2);
6036 IEM_MC_LOCAL(uint64_t, u64Value);
6037 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6038 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6040 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6041 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6042 IEM_MC_ADVANCE_RIP();
6043 IEM_MC_END();
6044 return VINF_SUCCESS;
6045
6046 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6047 }
6048 }
6049}
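
/*
 * Illustrative sketch (not emulator code): the zero extension done by the
 * MOVZX handler above versus the sign extension done by the MOVSX handler
 * at opcode 0x0f 0xbe below is just the signedness of a C cast.  The
 * sketch* names are hypothetical.
 */
#if 0
static uint32_t sketchZxU8ToU32(uint8_t u8) { return (uint32_t)u8;          /* movzx: high bits cleared   */ }
static uint32_t sketchSxU8ToU32(uint8_t u8) { return (uint32_t)(int8_t)u8;  /* movsx: sign bit replicated */ }
#endif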
6050
6051
6052/** Opcode 0x0f 0xb7. */
6053FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6054{
6055 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6056 IEMOP_HLP_MIN_386();
6057
6058 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6059
6060 /** @todo Not entirely sure how the operand size prefix is handled here,
6061 * assuming that it will be ignored. It would be nice to have a few
6062 * tests for this. */
6063 /*
6064 * If rm is denoting a register, no more instruction bytes.
6065 */
6066 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6067 {
6068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6069 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6070 {
6071 IEM_MC_BEGIN(0, 1);
6072 IEM_MC_LOCAL(uint32_t, u32Value);
6073 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6074 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6075 IEM_MC_ADVANCE_RIP();
6076 IEM_MC_END();
6077 }
6078 else
6079 {
6080 IEM_MC_BEGIN(0, 1);
6081 IEM_MC_LOCAL(uint64_t, u64Value);
6082 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6083 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6084 IEM_MC_ADVANCE_RIP();
6085 IEM_MC_END();
6086 }
6087 }
6088 else
6089 {
6090 /*
6091 * We're loading a register from memory.
6092 */
6093 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6094 {
6095 IEM_MC_BEGIN(0, 2);
6096 IEM_MC_LOCAL(uint32_t, u32Value);
6097 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6098 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6100 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6101 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6102 IEM_MC_ADVANCE_RIP();
6103 IEM_MC_END();
6104 }
6105 else
6106 {
6107 IEM_MC_BEGIN(0, 2);
6108 IEM_MC_LOCAL(uint64_t, u64Value);
6109 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6110 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6112 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6113 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6114 IEM_MC_ADVANCE_RIP();
6115 IEM_MC_END();
6116 }
6117 }
6118 return VINF_SUCCESS;
6119}
6120
6121
6122/** Opcode 0x0f 0xb8. */
6123FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
6124
6125
6126/** Opcode 0x0f 0xb9. */
6127FNIEMOP_DEF(iemOp_Grp10)
6128{
6129 Log(("iemOp_Grp10 -> #UD\n"));
6130 return IEMOP_RAISE_INVALID_OPCODE();
6131}
6132
6133
6134/** Opcode 0x0f 0xba. */
6135FNIEMOP_DEF(iemOp_Grp8)
6136{
6137 IEMOP_HLP_MIN_386();
6138 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6139 PCIEMOPBINSIZES pImpl;
6140 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6141 {
6142 case 0: case 1: case 2: case 3:
6143 return IEMOP_RAISE_INVALID_OPCODE();
6144 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6145 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6146 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6147 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6148 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6149 }
6150 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6151
6152 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6153 {
6154 /* register destination. */
6155 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6157
6158 switch (pVCpu->iem.s.enmEffOpSize)
6159 {
6160 case IEMMODE_16BIT:
6161 IEM_MC_BEGIN(3, 0);
6162 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6163 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6164 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6165
6166 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6167 IEM_MC_REF_EFLAGS(pEFlags);
6168 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6169
6170 IEM_MC_ADVANCE_RIP();
6171 IEM_MC_END();
6172 return VINF_SUCCESS;
6173
6174 case IEMMODE_32BIT:
6175 IEM_MC_BEGIN(3, 0);
6176 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6177 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6178 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6179
6180 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6181 IEM_MC_REF_EFLAGS(pEFlags);
6182 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6183
6184 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6185 IEM_MC_ADVANCE_RIP();
6186 IEM_MC_END();
6187 return VINF_SUCCESS;
6188
6189 case IEMMODE_64BIT:
6190 IEM_MC_BEGIN(3, 0);
6191 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6192 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6193 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6194
6195 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6196 IEM_MC_REF_EFLAGS(pEFlags);
6197 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6198
6199 IEM_MC_ADVANCE_RIP();
6200 IEM_MC_END();
6201 return VINF_SUCCESS;
6202
6203 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6204 }
6205 }
6206 else
6207 {
6208 /* memory destination. */
6209
6210 uint32_t fAccess;
6211 if (pImpl->pfnLockedU16)
6212 fAccess = IEM_ACCESS_DATA_RW;
6213 else /* BT */
6214 fAccess = IEM_ACCESS_DATA_R;
6215
6216 /** @todo test negative bit offsets! */
6217 switch (pVCpu->iem.s.enmEffOpSize)
6218 {
6219 case IEMMODE_16BIT:
6220 IEM_MC_BEGIN(3, 1);
6221 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6222 IEM_MC_ARG(uint16_t, u16Src, 1);
6223 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6225
6226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6227 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6228 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6229 if (pImpl->pfnLockedU16)
6230 IEMOP_HLP_DONE_DECODING();
6231 else
6232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6233 IEM_MC_FETCH_EFLAGS(EFlags);
6234 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6235 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6236 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6237 else
6238 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6239 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6240
6241 IEM_MC_COMMIT_EFLAGS(EFlags);
6242 IEM_MC_ADVANCE_RIP();
6243 IEM_MC_END();
6244 return VINF_SUCCESS;
6245
6246 case IEMMODE_32BIT:
6247 IEM_MC_BEGIN(3, 1);
6248 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6249 IEM_MC_ARG(uint32_t, u32Src, 1);
6250 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6251 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6252
6253 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6254 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6255 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6256 if (pImpl->pfnLockedU16)
6257 IEMOP_HLP_DONE_DECODING();
6258 else
6259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6260 IEM_MC_FETCH_EFLAGS(EFlags);
6261 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6262 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6263 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6264 else
6265 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6266 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6267
6268 IEM_MC_COMMIT_EFLAGS(EFlags);
6269 IEM_MC_ADVANCE_RIP();
6270 IEM_MC_END();
6271 return VINF_SUCCESS;
6272
6273 case IEMMODE_64BIT:
6274 IEM_MC_BEGIN(3, 1);
6275 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6276 IEM_MC_ARG(uint64_t, u64Src, 1);
6277 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6278 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6279
6280 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6281 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6282 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6283 if (pImpl->pfnLockedU16)
6284 IEMOP_HLP_DONE_DECODING();
6285 else
6286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6287 IEM_MC_FETCH_EFLAGS(EFlags);
6288 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6289 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6290 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6291 else
6292 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6293 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6294
6295 IEM_MC_COMMIT_EFLAGS(EFlags);
6296 IEM_MC_ADVANCE_RIP();
6297 IEM_MC_END();
6298 return VINF_SUCCESS;
6299
6300 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6301 }
6302 }
6303
6304}
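
/*
 * Illustrative sketch (not emulator code): what the four group-8 members
 * dispatched above do with a 32-bit register operand.  The immediate bit
 * index is masked to the operand width (u8Bit & 0x1f), matching the
 * register paths above; CF receives the bit's old value in all four cases.
 * sketchBtcU32 is a hypothetical name.
 */
#if 0
static void sketchBtcU32(uint32_t *puDst, uint8_t u8Bit, uint32_t *pfEFlags)
{
    uint32_t const fMask = UINT32_C(1) << (u8Bit & 0x1f); /* index masked to 0..31 */
    if (*puDst & fMask)
        *pfEFlags |= X86_EFL_CF;        /* CF = old bit value */
    else
        *pfEFlags &= ~X86_EFL_CF;
    *puDst ^= fMask;                    /* btc complements; bts would OR, btr would AND with ~fMask */
}
#endif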
6305
6306
6307/** Opcode 0x0f 0xbb. */
6308FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6309{
6310 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
6311 IEMOP_HLP_MIN_386();
6312 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6313}
6314
6315
6316/** Opcode 0x0f 0xbc. */
6317FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6318{
6319 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
6320 IEMOP_HLP_MIN_386();
6321 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6322 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6323}
6324
6325
6326/** Opcode 0x0f 0xbd. */
6327FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6328{
6329 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
6330 IEMOP_HLP_MIN_386();
6331 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6332 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6333}
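
/*
 * Illustrative sketch (not emulator code): BSF as performed by the worker
 * behind g_iemAImpl_bsf above.  When the source is zero, ZF is set and the
 * destination is typically left untouched (architecturally undefined);
 * otherwise the index of the lowest set bit is stored.  BSR is the same
 * scan from the top.  sketchBsfU32 is a hypothetical name.
 */
#if 0
static void sketchBsfU32(uint32_t *puDst, uint32_t uSrc, uint32_t *pfEFlags)
{
    if (!uSrc)
        *pfEFlags |= X86_EFL_ZF;        /* zero source: ZF=1, no result written */
    else
    {
        uint32_t iBit = 0;
        while (!(uSrc & 1))             /* walk up to the least significant set bit */
        {
            uSrc >>= 1;
            iBit++;
        }
        *puDst     = iBit;
        *pfEFlags &= ~X86_EFL_ZF;
    }
}
#endif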
6334
6335
6336/** Opcode 0x0f 0xbe. */
6337FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
6338{
6339 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
6340 IEMOP_HLP_MIN_386();
6341
6342 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6343
6344 /*
6345 * If rm is denoting a register, no more instruction bytes.
6346 */
6347 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6348 {
6349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6350 switch (pVCpu->iem.s.enmEffOpSize)
6351 {
6352 case IEMMODE_16BIT:
6353 IEM_MC_BEGIN(0, 1);
6354 IEM_MC_LOCAL(uint16_t, u16Value);
6355 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6356 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6357 IEM_MC_ADVANCE_RIP();
6358 IEM_MC_END();
6359 return VINF_SUCCESS;
6360
6361 case IEMMODE_32BIT:
6362 IEM_MC_BEGIN(0, 1);
6363 IEM_MC_LOCAL(uint32_t, u32Value);
6364 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6365 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6366 IEM_MC_ADVANCE_RIP();
6367 IEM_MC_END();
6368 return VINF_SUCCESS;
6369
6370 case IEMMODE_64BIT:
6371 IEM_MC_BEGIN(0, 1);
6372 IEM_MC_LOCAL(uint64_t, u64Value);
6373 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6374 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6375 IEM_MC_ADVANCE_RIP();
6376 IEM_MC_END();
6377 return VINF_SUCCESS;
6378
6379 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6380 }
6381 }
6382 else
6383 {
6384 /*
6385 * We're loading a register from memory.
6386 */
6387 switch (pVCpu->iem.s.enmEffOpSize)
6388 {
6389 case IEMMODE_16BIT:
6390 IEM_MC_BEGIN(0, 2);
6391 IEM_MC_LOCAL(uint16_t, u16Value);
6392 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6393 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6395 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6396 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6397 IEM_MC_ADVANCE_RIP();
6398 IEM_MC_END();
6399 return VINF_SUCCESS;
6400
6401 case IEMMODE_32BIT:
6402 IEM_MC_BEGIN(0, 2);
6403 IEM_MC_LOCAL(uint32_t, u32Value);
6404 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6405 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6407 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6408 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6409 IEM_MC_ADVANCE_RIP();
6410 IEM_MC_END();
6411 return VINF_SUCCESS;
6412
6413 case IEMMODE_64BIT:
6414 IEM_MC_BEGIN(0, 2);
6415 IEM_MC_LOCAL(uint64_t, u64Value);
6416 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6419 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6420 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6421 IEM_MC_ADVANCE_RIP();
6422 IEM_MC_END();
6423 return VINF_SUCCESS;
6424
6425 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6426 }
6427 }
6428}
6429
6430
6431/** Opcode 0x0f 0xbf. */
6432FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
6433{
6434 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
6435 IEMOP_HLP_MIN_386();
6436
6437 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6438
6439 /** @todo Not entirely sure how the operand size prefix is handled here,
6440 * assuming that it will be ignored. It would be nice to have a few
6441 * tests for this. */
6442 /*
6443 * If rm is denoting a register, no more instruction bytes.
6444 */
6445 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6446 {
6447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6448 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6449 {
6450 IEM_MC_BEGIN(0, 1);
6451 IEM_MC_LOCAL(uint32_t, u32Value);
6452 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6453 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6454 IEM_MC_ADVANCE_RIP();
6455 IEM_MC_END();
6456 }
6457 else
6458 {
6459 IEM_MC_BEGIN(0, 1);
6460 IEM_MC_LOCAL(uint64_t, u64Value);
6461 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6462 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6463 IEM_MC_ADVANCE_RIP();
6464 IEM_MC_END();
6465 }
6466 }
6467 else
6468 {
6469 /*
6470 * We're loading a register from memory.
6471 */
6472 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6473 {
6474 IEM_MC_BEGIN(0, 2);
6475 IEM_MC_LOCAL(uint32_t, u32Value);
6476 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6477 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6479 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6480 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6481 IEM_MC_ADVANCE_RIP();
6482 IEM_MC_END();
6483 }
6484 else
6485 {
6486 IEM_MC_BEGIN(0, 2);
6487 IEM_MC_LOCAL(uint64_t, u64Value);
6488 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6489 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6491 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6492 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6493 IEM_MC_ADVANCE_RIP();
6494 IEM_MC_END();
6495 }
6496 }
6497 return VINF_SUCCESS;
6498}
6499
6500
6501/** Opcode 0x0f 0xc0. */
6502FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
6503{
6504 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6505 IEMOP_HLP_MIN_486();
6506 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
6507
6508 /*
6509 * If rm is denoting a register, no more instruction bytes.
6510 */
6511 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6512 {
6513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6514
6515 IEM_MC_BEGIN(3, 0);
6516 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6517 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6518 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6519
6520 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6521 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6522 IEM_MC_REF_EFLAGS(pEFlags);
6523 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6524
6525 IEM_MC_ADVANCE_RIP();
6526 IEM_MC_END();
6527 }
6528 else
6529 {
6530 /*
6531 * We're accessing memory.
6532 */
6533 IEM_MC_BEGIN(3, 3);
6534 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6535 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6536 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6537 IEM_MC_LOCAL(uint8_t, u8RegCopy);
6538 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6539
6540 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6541 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6542 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6543 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
6544 IEM_MC_FETCH_EFLAGS(EFlags);
6545 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6546 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6547 else
6548 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
6549
6550 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6551 IEM_MC_COMMIT_EFLAGS(EFlags);
6552 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
6553 IEM_MC_ADVANCE_RIP();
6554 IEM_MC_END();
6555 return VINF_SUCCESS;
6556 }
6557 return VINF_SUCCESS;
6558}
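
/*
 * Illustrative sketch (not emulator code): the exchange-and-add done by
 * iemAImpl_xadd_u8 above — the destination receives the sum while the
 * source register receives the old destination value.  The arithmetic
 * flags (set as for ADD) are omitted.  sketchXaddU8 is a hypothetical name.
 */
#if 0
static void sketchXaddU8(uint8_t *puDst, uint8_t *puReg)
{
    uint8_t const uOldDst = *puDst;
    *puDst = (uint8_t)(uOldDst + *puReg);   /* destination = old dst + reg */
    *puReg = uOldDst;                       /* register    = old dst       */
}
#endif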
6559
6560
6561/** Opcode 0x0f 0xc1. */
6562FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
6563{
6564 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
6565 IEMOP_HLP_MIN_486();
6566 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6567
6568 /*
6569 * If rm is denoting a register, no more instruction bytes.
6570 */
6571 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6572 {
6573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6574
6575 switch (pVCpu->iem.s.enmEffOpSize)
6576 {
6577 case IEMMODE_16BIT:
6578 IEM_MC_BEGIN(3, 0);
6579 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6580 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6581 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6582
6583 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6584 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6585 IEM_MC_REF_EFLAGS(pEFlags);
6586 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6587
6588 IEM_MC_ADVANCE_RIP();
6589 IEM_MC_END();
6590 return VINF_SUCCESS;
6591
6592 case IEMMODE_32BIT:
6593 IEM_MC_BEGIN(3, 0);
6594 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6595 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6596 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6597
6598 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6599 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6600 IEM_MC_REF_EFLAGS(pEFlags);
6601 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6602
6603 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6604 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
6605 IEM_MC_ADVANCE_RIP();
6606 IEM_MC_END();
6607 return VINF_SUCCESS;
6608
6609 case IEMMODE_64BIT:
6610 IEM_MC_BEGIN(3, 0);
6611 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6612 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6613 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6614
6615 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6616 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6617 IEM_MC_REF_EFLAGS(pEFlags);
6618 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6619
6620 IEM_MC_ADVANCE_RIP();
6621 IEM_MC_END();
6622 return VINF_SUCCESS;
6623
6624 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6625 }
6626 }
6627 else
6628 {
6629 /*
6630 * We're accessing memory.
6631 */
6632 switch (pVCpu->iem.s.enmEffOpSize)
6633 {
6634 case IEMMODE_16BIT:
6635 IEM_MC_BEGIN(3, 3);
6636 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6637 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6638 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6639 IEM_MC_LOCAL(uint16_t, u16RegCopy);
6640 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6641
6642 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6643 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6644 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6645 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
6646 IEM_MC_FETCH_EFLAGS(EFlags);
6647 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6648 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6649 else
6650 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
6651
6652 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6653 IEM_MC_COMMIT_EFLAGS(EFlags);
6654 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
6655 IEM_MC_ADVANCE_RIP();
6656 IEM_MC_END();
6657 return VINF_SUCCESS;
6658
6659 case IEMMODE_32BIT:
6660 IEM_MC_BEGIN(3, 3);
6661 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6662 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6663 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6664 IEM_MC_LOCAL(uint32_t, u32RegCopy);
6665 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6666
6667 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6668 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6669 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6670 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
6671 IEM_MC_FETCH_EFLAGS(EFlags);
6672 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6673 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6674 else
6675 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
6676
6677 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6678 IEM_MC_COMMIT_EFLAGS(EFlags);
6679 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
6680 IEM_MC_ADVANCE_RIP();
6681 IEM_MC_END();
6682 return VINF_SUCCESS;
6683
6684 case IEMMODE_64BIT:
6685 IEM_MC_BEGIN(3, 3);
6686 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6687 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6688 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6689 IEM_MC_LOCAL(uint64_t, u64RegCopy);
6690 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6691
6692 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6693 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6694 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6695 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
6696 IEM_MC_FETCH_EFLAGS(EFlags);
6697 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6698 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6699 else
6700 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
6701
6702 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6703 IEM_MC_COMMIT_EFLAGS(EFlags);
6704 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
6705 IEM_MC_ADVANCE_RIP();
6706 IEM_MC_END();
6707 return VINF_SUCCESS;
6708
6709 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6710 }
6711 }
6712}
6713
6714/** Opcode 0x0f 0xc2. */
6715FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);
6716
6717
6718/** Opcode 0x0f 0xc3. */
6719FNIEMOP_DEF(iemOp_movnti_My_Gy)
6720{
6721 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
6722
6723 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6724
6725 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
6726 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
6727 {
6728 switch (pVCpu->iem.s.enmEffOpSize)
6729 {
6730 case IEMMODE_32BIT:
6731 IEM_MC_BEGIN(0, 2);
6732 IEM_MC_LOCAL(uint32_t, u32Value);
6733 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6734
6735 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6737 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6738 return IEMOP_RAISE_INVALID_OPCODE();
6739
6740 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6741 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
6742 IEM_MC_ADVANCE_RIP();
6743 IEM_MC_END();
6744 break;
6745
6746 case IEMMODE_64BIT:
6747 IEM_MC_BEGIN(0, 2);
6748 IEM_MC_LOCAL(uint64_t, u64Value);
6749 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6750
6751 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6753 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6754 return IEMOP_RAISE_INVALID_OPCODE();
6755
6756 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6757 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
6758 IEM_MC_ADVANCE_RIP();
6759 IEM_MC_END();
6760 break;
6761
6762 case IEMMODE_16BIT:
6763 /** @todo check this form. */
6764 return IEMOP_RAISE_INVALID_OPCODE();
6765 }
6766 }
6767 else
6768 return IEMOP_RAISE_INVALID_OPCODE();
6769 return VINF_SUCCESS;
6770}
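
/*
 * Illustrative sketch (not emulator code): what guest compilers typically
 * emit MOVNTI for — a non-temporal store that bypasses the cache.  The
 * handler above is free to degrade it to an ordinary store.  The intrinsic
 * below is the SSE2 one from <emmintrin.h>; sketchStreamStore is a
 * hypothetical example name.
 */
#if 0
# include <emmintrin.h>
static void sketchStreamStore(int *pi, int iValue)
{
    _mm_stream_si32(pi, iValue);        /* movnti [pi], iValue */
}
#endif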
6771
6772
6773/** Opcode 0x0f 0xc4. */
6774FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);
6775
6776/** Opcode 0x0f 0xc5. */
6777FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);
6778
6779/** Opcode 0x0f 0xc6. */
6780FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
6781
6782
6783/** Opcode 0x0f 0xc7 !11/1. */
6784FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
6785{
6786 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
6787
6788 IEM_MC_BEGIN(4, 3);
6789 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
6790 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
6791 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
6792 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
6793 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
6794 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
6795 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6796
6797 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6798 IEMOP_HLP_DONE_DECODING();
6799 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6800
6801 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
6802 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
6803 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
6804
6805 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
6806 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
6807 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
6808
6809 IEM_MC_FETCH_EFLAGS(EFlags);
6810 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6811 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
6812 else
6813 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
6814
6815 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
6816 IEM_MC_COMMIT_EFLAGS(EFlags);
6817 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
6818 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
6819 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
6820 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
6821 IEM_MC_ENDIF();
6822 IEM_MC_ADVANCE_RIP();
6823
6824 IEM_MC_END();
6825 return VINF_SUCCESS;
6826}
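
/*
 * Illustrative sketch (not emulator code): the architectural operation of
 * CMPXCHG8B m64 as wired up above, folded into one 64-bit compare.  On a
 * match ECX:EBX is stored and ZF set; on a mismatch the memory value is
 * loaded into EDX:EAX, which is what the IEM_MC_IF_EFL_BIT_NOT_SET block
 * above commits.  sketchCmpXchg8b is a hypothetical name.
 */
#if 0
static void sketchCmpXchg8b(uint64_t *pu64Dst, uint64_t *pu64EaxEdx, uint64_t u64EbxEcx, uint32_t *pfEFlags)
{
    if (*pu64Dst == *pu64EaxEdx)
    {
        *pu64Dst   = u64EbxEcx;         /* match: store ECX:EBX */
        *pfEFlags |= X86_EFL_ZF;
    }
    else
    {
        *pu64EaxEdx = *pu64Dst;         /* mismatch: load EDX:EAX from memory */
        *pfEFlags  &= ~X86_EFL_ZF;
    }
}
#endif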
6827
6828
6829/** Opcode REX.W 0x0f 0xc7 !11/1. */
6830FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
6831{
6832 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
6833 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
6834 {
6835 RT_NOREF(bRm);
6836 IEMOP_BITCH_ABOUT_STUB();
6837 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6838 }
6839 Log(("cmpxchg16b -> #UD\n"));
6840 return IEMOP_RAISE_INVALID_OPCODE();
6841}
6842
6843
6844/** Opcode 0x0f 0xc7 11/6. */
6845FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
6846
6847/** Opcode 0x0f 0xc7 !11/6. */
6848FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
6849
6850/** Opcode 0x66 0x0f 0xc7 !11/6. */
6851FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
6852
6853/** Opcode 0xf3 0x0f 0xc7 !11/6. */
6854FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
6855
6856/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
6857FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6858
6859
6860/** Opcode 0x0f 0xc7. */
6861FNIEMOP_DEF(iemOp_Grp9)
6862{
6863 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6864 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6865 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6866 {
6867 case 0: case 2: case 3: case 4: case 5:
6868 return IEMOP_RAISE_INVALID_OPCODE();
6869 case 1:
6870 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6871 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6872 || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6873 return IEMOP_RAISE_INVALID_OPCODE();
6874 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6875 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6876 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6877 case 6:
6878 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6879 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6880 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6881 {
6882 case 0:
6883 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6884 case IEM_OP_PRF_SIZE_OP:
6885 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6886 case IEM_OP_PRF_REPZ:
6887 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6888 default:
6889 return IEMOP_RAISE_INVALID_OPCODE();
6890 }
6891 case 7:
6892 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6893 {
6894 case 0:
6895 case IEM_OP_PRF_REPZ:
6896 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6897 default:
6898 return IEMOP_RAISE_INVALID_OPCODE();
6899 }
6900 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6901 }
6902}
6903
6904
6905/**
6906 * Common 'bswap register' helper.
6907 */
6908FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
6909{
6910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6911 switch (pVCpu->iem.s.enmEffOpSize)
6912 {
6913 case IEMMODE_16BIT:
6914 IEM_MC_BEGIN(1, 0);
6915 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6916 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
6917 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
6918 IEM_MC_ADVANCE_RIP();
6919 IEM_MC_END();
6920 return VINF_SUCCESS;
6921
6922 case IEMMODE_32BIT:
6923 IEM_MC_BEGIN(1, 0);
6924 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6925 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
6926 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6927 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
6928 IEM_MC_ADVANCE_RIP();
6929 IEM_MC_END();
6930 return VINF_SUCCESS;
6931
6932 case IEMMODE_64BIT:
6933 IEM_MC_BEGIN(1, 0);
6934 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6935 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
6936 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
6937 IEM_MC_ADVANCE_RIP();
6938 IEM_MC_END();
6939 return VINF_SUCCESS;
6940
6941 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6942 }
6943}
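
/* The 16-bit form above is documented as undefined, which is why the high
   dword is deliberately left alone there. For reference, a portable C sketch
   of the byte swap the assembly helper iemAImpl_bswap_u32 performs
   (illustration only, not the real implementation; bswap leaves eflags
   untouched): */
#if 0
static void sketchBswapU32(uint32_t *pu32Dst)
{
    uint32_t const uSrc = *pu32Dst;
    *pu32Dst = (uSrc >> 24)
             | ((uSrc >>  8) & UINT32_C(0x0000ff00))
             | ((uSrc <<  8) & UINT32_C(0x00ff0000))
             | (uSrc << 24);
}
#endif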
6944
6945
6946/** Opcode 0x0f 0xc8. */
6947FNIEMOP_DEF(iemOp_bswap_rAX_r8)
6948{
6949 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
6950 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
6951 prefix, but it appears REX.B is the correct prefix. For a parallel
6952 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
6953 IEMOP_HLP_MIN_486();
6954 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
6955}
6956
6957
6958/** Opcode 0x0f 0xc9. */
6959FNIEMOP_DEF(iemOp_bswap_rCX_r9)
6960{
6961 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
6962 IEMOP_HLP_MIN_486();
6963 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
6964}
6965
6966
6967/** Opcode 0x0f 0xca. */
6968FNIEMOP_DEF(iemOp_bswap_rDX_r10)
6969{
6970 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
6971 IEMOP_HLP_MIN_486();
6972 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
6973}
6974
6975
6976/** Opcode 0x0f 0xcb. */
6977FNIEMOP_DEF(iemOp_bswap_rBX_r11)
6978{
6979 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
6980 IEMOP_HLP_MIN_486();
6981 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
6982}
6983
6984
6985/** Opcode 0x0f 0xcc. */
6986FNIEMOP_DEF(iemOp_bswap_rSP_r12)
6987{
6988 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
6989 IEMOP_HLP_MIN_486();
6990 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
6991}
6992
6993
6994/** Opcode 0x0f 0xcd. */
6995FNIEMOP_DEF(iemOp_bswap_rBP_r13)
6996{
6997 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
6998 IEMOP_HLP_MIN_486();
6999 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7000}
7001
7002
7003/** Opcode 0x0f 0xce. */
7004FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7005{
7006 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7007 IEMOP_HLP_MIN_486();
7008 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7009}
7010
7011
7012/** Opcode 0x0f 0xcf. */
7013FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7014{
7015 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7016 IEMOP_HLP_MIN_486();
7017 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7018}
7019
7020
7021
7022/** Opcode 0x0f 0xd0. */
7023FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
7024/** Opcode 0x0f 0xd1. */
7025 FNIEMOP_STUB(iemOp_psrlw_Pq_Qq__psrlw_Vdq_Wdq);
7026/** Opcode 0x0f 0xd2. */
7027FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
7028/** Opcode 0x0f 0xd3. */
7029FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
7030/** Opcode 0x0f 0xd4. */
7031FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
7032/** Opcode 0x0f 0xd5. */
7033 FNIEMOP_STUB(iemOp_pmullw_Pq_Qq__pmullw_Vdq_Wdq);
7034/** Opcode 0x0f 0xd6. */
7035FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq); /** @todo Win10 w/o np may need this: 66 0f d6 0a */
7036
7037
7038/** Opcode 0x0f 0xd7. */
7039FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
7040{
7041 /* Docs say register only. */
7042 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7043 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7044 return IEMOP_RAISE_INVALID_OPCODE();
7045
7046 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7047 /** @todo testcase: Check that the instruction implicitly clears the high
7048 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
7049 * and opcode modifications are made to work with the whole width (not
7050 * just 128). */
7051 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7052 {
7053 case IEM_OP_PRF_SIZE_OP: /* SSE */
7054 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7055 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7056 IEM_MC_BEGIN(2, 0);
7057 IEM_MC_ARG(uint64_t *, pDst, 0);
7058 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7059 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7060 IEM_MC_PREPARE_SSE_USAGE();
7061 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7062 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7063 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7064 IEM_MC_ADVANCE_RIP();
7065 IEM_MC_END();
7066 return VINF_SUCCESS;
7067
7068 case 0: /* MMX */
7069 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7070 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7071 IEM_MC_BEGIN(2, 0);
7072 IEM_MC_ARG(uint64_t *, pDst, 0);
7073 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7074 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7075 IEM_MC_PREPARE_FPU_USAGE();
7076 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7077 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7078 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7079 IEM_MC_ADVANCE_RIP();
7080 IEM_MC_END();
7081 return VINF_SUCCESS;
7082
7083 default:
7084 return IEMOP_RAISE_INVALID_OPCODE();
7085 }
7086}
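
/* Semantics sketch for the SSE form (illustration only, not the real
   iemAImpl_pmovmskb_u128): the destination gets one bit per source byte,
   namely that byte's most significant bit, and all higher bits are zeroed: */
#if 0
static void sketchPMovMskBU128(uint64_t *puDst, uint8_t const *pabSrc /* 16 bytes */)
{
    uint32_t fMask = 0;
    for (unsigned iByte = 0; iByte < 16; iByte++)
        fMask |= (uint32_t)(pabSrc[iByte] >> 7) << iByte;
    *puDst = fMask; /* the MMX variant does the same over 8 bytes */
}
#endif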
7087
7088
7089/** Opcode 0x0f 0xd8. */
7090FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
7091/** Opcode 0x0f 0xd9. */
7092FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
7093/** Opcode 0x0f 0xda. */
7094FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
7095/** Opcode 0x0f 0xdb. */
7096FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
7097/** Opcode 0x0f 0xdc. */
7098FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
7099/** Opcode 0x0f 0xdd. */
7100FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
7101/** Opcode 0x0f 0xde. */
7102 FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pmaxub_Vdq_Wdq);
7103/** Opcode 0x0f 0xdf. */
7104FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
7105/** Opcode 0x0f 0xe0. */
7106FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
7107/** Opcode 0x0f 0xe1. */
7108FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
7109/** Opcode 0x0f 0xe2. */
7110FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
7111/** Opcode 0x0f 0xe3. */
7112FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
7113/** Opcode 0x0f 0xe4. */
7114FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
7115/** Opcode 0x0f 0xe5. */
7116FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
7117/** Opcode 0x0f 0xe6. */
7118 FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wpd__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
7119
7120
7121/** Opcode 0x0f 0xe7. */
7122FNIEMOP_DEF(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq)
7123{
7124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7125 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7126 {
7127 /*
7128 * Register, memory.
7129 */
7130/** @todo check when the REPNZ/Z bits kick in. Same as lock, probably... */
7131 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7132 {
7133
7134 case IEM_OP_PRF_SIZE_OP: /* SSE */
7135 IEMOP_MNEMONIC(movntdq_Mdq_Vdq, "movntdq Mdq,Vdq");
7136 IEM_MC_BEGIN(0, 2);
7137 IEM_MC_LOCAL(uint128_t, uSrc);
7138 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7139
7140 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7142 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7143 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7144
7145 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7146 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7147
7148 IEM_MC_ADVANCE_RIP();
7149 IEM_MC_END();
7150 break;
7151
7152 case 0: /* MMX */
7153 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7154 IEM_MC_BEGIN(0, 2);
7155 IEM_MC_LOCAL(uint64_t, uSrc);
7156 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7157
7158 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7159 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7160 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7161 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7162
7163 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7164 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7165
7166 IEM_MC_ADVANCE_RIP();
7167 IEM_MC_END();
7168 break;
7169
7170 default:
7171 return IEMOP_RAISE_INVALID_OPCODE();
7172 }
7173 }
7174 /* The register, register encoding is invalid. */
7175 else
7176 return IEMOP_RAISE_INVALID_OPCODE();
7177 return VINF_SUCCESS;
7178}
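
/* Note on the register form being invalid: both movntq and movntdq are
   non-temporal store hints, which only make sense with a memory destination;
   the mod=3 encoding is therefore #UD, matching the check above. */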
7179
7180
7181/** Opcode 0x0f 0xe8. */
7182FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
7183/** Opcode 0x0f 0xe9. */
7184FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
7185/** Opcode 0x0f 0xea. */
7186FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
7187/** Opcode 0x0f 0xeb. */
7188FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
7189/** Opcode 0x0f 0xec. */
7190FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
7191/** Opcode 0x0f 0xed. */
7192FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
7193/** Opcode 0x0f 0xee. */
7194FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
7195
7196
7197/** Opcode 0x0f 0xef. */
7198FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
7199{
7200 IEMOP_MNEMONIC(pxor, "pxor");
7201 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
7202}
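
/* The FullFull_To_Full worker applied here boils down to a wide XOR; a
   minimal sketch of the 128-bit operation (illustration only, not the
   actual iemAImpl_pxor body): */
#if 0
static void sketchPXorU128(uint64_t *pau64Dst /* 2 qwords */, uint64_t const *pau64Src)
{
    pau64Dst[0] ^= pau64Src[0];
    pau64Dst[1] ^= pau64Src[1];
}
#endif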
7203
7204
7205/** Opcode 0x0f 0xf0. */
7206FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
7207/** Opcode 0x0f 0xf1. */
7208 FNIEMOP_STUB(iemOp_psllw_Pq_Qq__psllw_Vdq_Wdq);
7209/** Opcode 0x0f 0xf2. */
7210 FNIEMOP_STUB(iemOp_pslld_Pq_Qq__pslld_Vdq_Wdq);
7211/** Opcode 0x0f 0xf3. */
7212 FNIEMOP_STUB(iemOp_psllq_Pq_Qq__psllq_Vdq_Wdq);
7213/** Opcode 0x0f 0xf4. */
7214FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
7215/** Opcode 0x0f 0xf5. */
7216FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
7217/** Opcode 0x0f 0xf6. */
7218FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
7219/** Opcode 0x0f 0xf7. */
7220FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
7221/** Opcode 0x0f 0xf8. */
7222 FNIEMOP_STUB(iemOp_psubb_Pq_Qq__psubb_Vdq_Wdq); //NEXT
7223/** Opcode 0x0f 0xf9. */
7224FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
7225/** Opcode 0x0f 0xfa. */
7226FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
7227/** Opcode 0x0f 0xfb. */
7228 FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psubq_Vdq_Wdq);
7229/** Opcode 0x0f 0xfc. */
7230FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
7231/** Opcode 0x0f 0xfd. */
7232FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
7233/** Opcode 0x0f 0xfe. */
7234FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
7235
7236
7237IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[256] =
7238{
7239 /* 0x00 */ iemOp_Grp6,
7240 /* 0x01 */ iemOp_Grp7,
7241 /* 0x02 */ iemOp_lar_Gv_Ew,
7242 /* 0x03 */ iemOp_lsl_Gv_Ew,
7243 /* 0x04 */ iemOp_Invalid,
7244 /* 0x05 */ iemOp_syscall,
7245 /* 0x06 */ iemOp_clts,
7246 /* 0x07 */ iemOp_sysret,
7247 /* 0x08 */ iemOp_invd,
7248 /* 0x09 */ iemOp_wbinvd,
7249 /* 0x0a */ iemOp_Invalid,
7250 /* 0x0b */ iemOp_ud2,
7251 /* 0x0c */ iemOp_Invalid,
7252 /* 0x0d */ iemOp_nop_Ev_GrpP,
7253 /* 0x0e */ iemOp_femms,
7254 /* 0x0f */ iemOp_3Dnow,
7255 /* 0x10 */ iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
7256 /* 0x11 */ iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
7257 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
7258 /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
7259 /* 0x14 */ iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
7260 /* 0x15 */ iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
7261 /* 0x16 */ iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
7262 /* 0x17 */ iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
7263 /* 0x18 */ iemOp_prefetch_Grp16,
7264 /* 0x19 */ iemOp_nop_Ev,
7265 /* 0x1a */ iemOp_nop_Ev,
7266 /* 0x1b */ iemOp_nop_Ev,
7267 /* 0x1c */ iemOp_nop_Ev,
7268 /* 0x1d */ iemOp_nop_Ev,
7269 /* 0x1e */ iemOp_nop_Ev,
7270 /* 0x1f */ iemOp_nop_Ev,
7271 /* 0x20 */ iemOp_mov_Rd_Cd,
7272 /* 0x21 */ iemOp_mov_Rd_Dd,
7273 /* 0x22 */ iemOp_mov_Cd_Rd,
7274 /* 0x23 */ iemOp_mov_Dd_Rd,
7275 /* 0x24 */ iemOp_mov_Rd_Td,
7276 /* 0x25 */ iemOp_Invalid,
7277 /* 0x26 */ iemOp_mov_Td_Rd,
7278 /* 0x27 */ iemOp_Invalid,
7279 /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
7280 /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
7281 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
7282 /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
7283 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
7284 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
7285 /* 0x2e */ iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
7286 /* 0x2f */ iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
7287 /* 0x30 */ iemOp_wrmsr,
7288 /* 0x31 */ iemOp_rdtsc,
7289 /* 0x32 */ iemOp_rdmsr,
7290 /* 0x33 */ iemOp_rdpmc,
7291 /* 0x34 */ iemOp_sysenter,
7292 /* 0x35 */ iemOp_sysexit,
7293 /* 0x36 */ iemOp_Invalid,
7294 /* 0x37 */ iemOp_getsec,
7295 /* 0x38 */ iemOp_3byte_Esc_A4,
7296 /* 0x39 */ iemOp_Invalid,
7297 /* 0x3a */ iemOp_3byte_Esc_A5,
7298 /* 0x3b */ iemOp_Invalid,
7299 /* 0x3c */ iemOp_Invalid,
7300 /* 0x3d */ iemOp_Invalid,
7301 /* 0x3e */ iemOp_Invalid,
7302 /* 0x3f */ iemOp_Invalid,
7303 /* 0x40 */ iemOp_cmovo_Gv_Ev,
7304 /* 0x41 */ iemOp_cmovno_Gv_Ev,
7305 /* 0x42 */ iemOp_cmovc_Gv_Ev,
7306 /* 0x43 */ iemOp_cmovnc_Gv_Ev,
7307 /* 0x44 */ iemOp_cmove_Gv_Ev,
7308 /* 0x45 */ iemOp_cmovne_Gv_Ev,
7309 /* 0x46 */ iemOp_cmovbe_Gv_Ev,
7310 /* 0x47 */ iemOp_cmovnbe_Gv_Ev,
7311 /* 0x48 */ iemOp_cmovs_Gv_Ev,
7312 /* 0x49 */ iemOp_cmovns_Gv_Ev,
7313 /* 0x4a */ iemOp_cmovp_Gv_Ev,
7314 /* 0x4b */ iemOp_cmovnp_Gv_Ev,
7315 /* 0x4c */ iemOp_cmovl_Gv_Ev,
7316 /* 0x4d */ iemOp_cmovnl_Gv_Ev,
7317 /* 0x4e */ iemOp_cmovle_Gv_Ev,
7318 /* 0x4f */ iemOp_cmovnle_Gv_Ev,
7319 /* 0x50 */ iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
7320 /* 0x51 */ iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
7321 /* 0x52 */ iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
7322 /* 0x53 */ iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
7323 /* 0x54 */ iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
7324 /* 0x55 */ iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
7325 /* 0x56 */ iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
7326 /* 0x57 */ iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
7327 /* 0x58 */ iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
7328 /* 0x59 */ iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
7329 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
7330 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
7331 /* 0x5c */ iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
7332 /* 0x5d */ iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
7333 /* 0x5e */ iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
7334 /* 0x5f */ iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
7335 /* 0x60 */ iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
7336 /* 0x61 */ iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
7337 /* 0x62 */ iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
7338 /* 0x63 */ iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
7339 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
7340 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
7341 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
7342 /* 0x67 */ iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
7343 /* 0x68 */ iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
7344 /* 0x69 */ iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
7345 /* 0x6a */ iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
7346 /* 0x6b */ iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
7347 /* 0x6c */ iemOp_punpcklqdq_Vdq_Wdq,
7348 /* 0x6d */ iemOp_punpckhqdq_Vdq_Wdq,
7349 /* 0x6e */ iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
7350 /* 0x6f */ iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
7351 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
7352 /* 0x71 */ iemOp_Grp12,
7353 /* 0x72 */ iemOp_Grp13,
7354 /* 0x73 */ iemOp_Grp14,
7355 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
7356 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
7357 /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
7358 /* 0x77 */ iemOp_emms,
7359 /* 0x78 */ iemOp_vmread_AmdGrp17,
7360 /* 0x79 */ iemOp_vmwrite,
7361 /* 0x7a */ iemOp_Invalid,
7362 /* 0x7b */ iemOp_Invalid,
7363 /* 0x7c */ iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
7364 /* 0x7d */ iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
7365 /* 0x7e */ iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
7366 /* 0x7f */ iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
7367 /* 0x80 */ iemOp_jo_Jv,
7368 /* 0x81 */ iemOp_jno_Jv,
7369 /* 0x82 */ iemOp_jc_Jv,
7370 /* 0x83 */ iemOp_jnc_Jv,
7371 /* 0x84 */ iemOp_je_Jv,
7372 /* 0x85 */ iemOp_jne_Jv,
7373 /* 0x86 */ iemOp_jbe_Jv,
7374 /* 0x87 */ iemOp_jnbe_Jv,
7375 /* 0x88 */ iemOp_js_Jv,
7376 /* 0x89 */ iemOp_jns_Jv,
7377 /* 0x8a */ iemOp_jp_Jv,
7378 /* 0x8b */ iemOp_jnp_Jv,
7379 /* 0x8c */ iemOp_jl_Jv,
7380 /* 0x8d */ iemOp_jnl_Jv,
7381 /* 0x8e */ iemOp_jle_Jv,
7382 /* 0x8f */ iemOp_jnle_Jv,
7383 /* 0x90 */ iemOp_seto_Eb,
7384 /* 0x91 */ iemOp_setno_Eb,
7385 /* 0x92 */ iemOp_setc_Eb,
7386 /* 0x93 */ iemOp_setnc_Eb,
7387 /* 0x94 */ iemOp_sete_Eb,
7388 /* 0x95 */ iemOp_setne_Eb,
7389 /* 0x96 */ iemOp_setbe_Eb,
7390 /* 0x97 */ iemOp_setnbe_Eb,
7391 /* 0x98 */ iemOp_sets_Eb,
7392 /* 0x99 */ iemOp_setns_Eb,
7393 /* 0x9a */ iemOp_setp_Eb,
7394 /* 0x9b */ iemOp_setnp_Eb,
7395 /* 0x9c */ iemOp_setl_Eb,
7396 /* 0x9d */ iemOp_setnl_Eb,
7397 /* 0x9e */ iemOp_setle_Eb,
7398 /* 0x9f */ iemOp_setnle_Eb,
7399 /* 0xa0 */ iemOp_push_fs,
7400 /* 0xa1 */ iemOp_pop_fs,
7401 /* 0xa2 */ iemOp_cpuid,
7402 /* 0xa3 */ iemOp_bt_Ev_Gv,
7403 /* 0xa4 */ iemOp_shld_Ev_Gv_Ib,
7404 /* 0xa5 */ iemOp_shld_Ev_Gv_CL,
7405 /* 0xa6 */ iemOp_Invalid,
7406 /* 0xa7 */ iemOp_Invalid,
7407 /* 0xa8 */ iemOp_push_gs,
7408 /* 0xa9 */ iemOp_pop_gs,
7409 /* 0xaa */ iemOp_rsm,
7410 /* 0xab */ iemOp_bts_Ev_Gv,
7411 /* 0xac */ iemOp_shrd_Ev_Gv_Ib,
7412 /* 0xad */ iemOp_shrd_Ev_Gv_CL,
7413 /* 0xae */ iemOp_Grp15,
7414 /* 0xaf */ iemOp_imul_Gv_Ev,
7415 /* 0xb0 */ iemOp_cmpxchg_Eb_Gb,
7416 /* 0xb1 */ iemOp_cmpxchg_Ev_Gv,
7417 /* 0xb2 */ iemOp_lss_Gv_Mp,
7418 /* 0xb3 */ iemOp_btr_Ev_Gv,
7419 /* 0xb4 */ iemOp_lfs_Gv_Mp,
7420 /* 0xb5 */ iemOp_lgs_Gv_Mp,
7421 /* 0xb6 */ iemOp_movzx_Gv_Eb,
7422 /* 0xb7 */ iemOp_movzx_Gv_Ew,
7423 /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,
7424 /* 0xb9 */ iemOp_Grp10,
7425 /* 0xba */ iemOp_Grp8,
7426 /* 0xbb */ iemOp_btc_Ev_Gv,
7427 /* 0xbc */ iemOp_bsf_Gv_Ev,
7428 /* 0xbd */ iemOp_bsr_Gv_Ev,
7429 /* 0xbe */ iemOp_movsx_Gv_Eb,
7430 /* 0xbf */ iemOp_movsx_Gv_Ew,
7431 /* 0xc0 */ iemOp_xadd_Eb_Gb,
7432 /* 0xc1 */ iemOp_xadd_Ev_Gv,
7433 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
7434 /* 0xc3 */ iemOp_movnti_My_Gy,
7435 /* 0xc4 */ iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
7436 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
7437 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
7438 /* 0xc7 */ iemOp_Grp9,
7439 /* 0xc8 */ iemOp_bswap_rAX_r8,
7440 /* 0xc9 */ iemOp_bswap_rCX_r9,
7441 /* 0xca */ iemOp_bswap_rDX_r10,
7442 /* 0xcb */ iemOp_bswap_rBX_r11,
7443 /* 0xcc */ iemOp_bswap_rSP_r12,
7444 /* 0xcd */ iemOp_bswap_rBP_r13,
7445 /* 0xce */ iemOp_bswap_rSI_r14,
7446 /* 0xcf */ iemOp_bswap_rDI_r15,
7447 /* 0xd0 */ iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
7448 /* 0xd1 */ iemOp_psrlw_Pq_Qq__psrlw_Vdq_Wdq,
7449 /* 0xd2 */ iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
7450 /* 0xd3 */ iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
7451 /* 0xd4 */ iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
7452 /* 0xd5 */ iemOp_pmullw_Pq_Qq__pmullw_Vdq_Wdq,
7453 /* 0xd6 */ iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
7454 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
7455 /* 0xd8 */ iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
7456 /* 0xd9 */ iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
7457 /* 0xda */ iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
7458 /* 0xdb */ iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
7459 /* 0xdc */ iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
7460 /* 0xdd */ iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
7461 /* 0xde */ iemOp_pmaxub_Pq_Qq__pmaxub_Vdq_Wdq,
7462 /* 0xdf */ iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
7463 /* 0xe0 */ iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
7464 /* 0xe1 */ iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
7465 /* 0xe2 */ iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
7466 /* 0xe3 */ iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
7467 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
7468 /* 0xe5 */ iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
7469 /* 0xe6 */ iemOp_cvttpd2dq_Vdq_Wpd__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
7470 /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
7471 /* 0xe8 */ iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
7472 /* 0xe9 */ iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
7473 /* 0xea */ iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
7474 /* 0xeb */ iemOp_por_Pq_Qq__por_Vdq_Wdq,
7475 /* 0xec */ iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
7476 /* 0xed */ iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
7477 /* 0xee */ iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
7478 /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
7479 /* 0xf0 */ iemOp_lddqu_Vdq_Mdq,
7480 /* 0xf1 */ iemOp_psllw_Pq_Qq__psllw_Vdq_Wdq,
7481 /* 0xf2 */ iemOp_pslld_Pq_Qq__pslld_Vdq_Wdq,
7482 /* 0xf3 */ iemOp_psllq_Pq_Qq__psllq_Vdq_Wdq,
7483 /* 0xf4 */ iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
7484 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
7485 /* 0xf6 */ iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
7486 /* 0xf7 */ iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
7487 /* 0xf8 */ iemOp_psubb_Pq_Qq__psubb_Vdq_Wdq,
7488 /* 0xf9 */ iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
7489 /* 0xfa */ iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
7490 /* 0xfb */ iemOp_psubq_Pq_Qq__psubq_Vdq_Wdq,
7491 /* 0xfc */ iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
7492 /* 0xfd */ iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
7493 /* 0xfe */ iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
7494 /* 0xff */ iemOp_Invalid
7495};
7496
7497/** @} */
7498
7499
7500/** @name One byte opcodes.
7501 *
7502 * @{
7503 */
7504
7505/** Opcode 0x00. */
7506FNIEMOP_DEF(iemOp_add_Eb_Gb)
7507{
7508 IEMOP_MNEMONIC(add_Eb_Gb, "add Eb,Gb");
7509 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
7510}
7511
7512
7513/** Opcode 0x01. */
7514FNIEMOP_DEF(iemOp_add_Ev_Gv)
7515{
7516 IEMOP_MNEMONIC(add_Ev_Gv, "add Ev,Gv");
7517 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
7518}
7519
7520
7521/** Opcode 0x02. */
7522FNIEMOP_DEF(iemOp_add_Gb_Eb)
7523{
7524 IEMOP_MNEMONIC(add_Gb_Eb, "add Gb,Eb");
7525 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
7526}
7527
7528
7529/** Opcode 0x03. */
7530FNIEMOP_DEF(iemOp_add_Gv_Ev)
7531{
7532 IEMOP_MNEMONIC(add_Gv_Ev, "add Gv,Ev");
7533 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
7534}
7535
7536
7537/** Opcode 0x04. */
7538FNIEMOP_DEF(iemOp_add_Al_Ib)
7539{
7540 IEMOP_MNEMONIC(add_al_Ib, "add al,Ib");
7541 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
7542}
7543
7544
7545/** Opcode 0x05. */
7546FNIEMOP_DEF(iemOp_add_eAX_Iz)
7547{
7548 IEMOP_MNEMONIC(add_rAX_Iz, "add rAX,Iz");
7549 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
7550}
7551
7552
7553/** Opcode 0x06. */
7554FNIEMOP_DEF(iemOp_push_ES)
7555{
7556 IEMOP_MNEMONIC(push_es, "push es");
7557 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
7558}
7559
7560
7561/** Opcode 0x07. */
7562FNIEMOP_DEF(iemOp_pop_ES)
7563{
7564 IEMOP_MNEMONIC(pop_es, "pop es");
7565 IEMOP_HLP_NO_64BIT();
7566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7567 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
7568}
7569
7570
7571/** Opcode 0x08. */
7572FNIEMOP_DEF(iemOp_or_Eb_Gb)
7573{
7574 IEMOP_MNEMONIC(or_Eb_Gb, "or Eb,Gb");
7575 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7576 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
7577}
7578
7579
7580/** Opcode 0x09. */
7581FNIEMOP_DEF(iemOp_or_Ev_Gv)
7582{
7583 IEMOP_MNEMONIC(or_Ev_Gv, "or Ev,Gv");
7584 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7585 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
7586}
7587
7588
7589/** Opcode 0x0a. */
7590FNIEMOP_DEF(iemOp_or_Gb_Eb)
7591{
7592 IEMOP_MNEMONIC(or_Gb_Eb, "or Gb,Eb");
7593 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7594 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
7595}
7596
7597
7598/** Opcode 0x0b. */
7599FNIEMOP_DEF(iemOp_or_Gv_Ev)
7600{
7601 IEMOP_MNEMONIC(or_Gv_Ev, "or Gv,Ev");
7602 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7603 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
7604}
7605
7606
7607/** Opcode 0x0c. */
7608FNIEMOP_DEF(iemOp_or_Al_Ib)
7609{
7610 IEMOP_MNEMONIC(or_al_Ib, "or al,Ib");
7611 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7612 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
7613}
7614
7615
7616/** Opcode 0x0d. */
7617FNIEMOP_DEF(iemOp_or_eAX_Iz)
7618{
7619 IEMOP_MNEMONIC(or_rAX_Iz, "or rAX,Iz");
7620 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7621 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
7622}
7623
7624
7625/** Opcode 0x0e. */
7626FNIEMOP_DEF(iemOp_push_CS)
7627{
7628 IEMOP_MNEMONIC(push_cs, "push cs");
7629 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
7630}
7631
7632
7633/** Opcode 0x0f. */
7634FNIEMOP_DEF(iemOp_2byteEscape)
7635{
7636 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7637 /** @todo PUSH CS on 8086, undefined on 80186. */
7638 IEMOP_HLP_MIN_286();
7639 return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
7640}
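
/* Example: the sequence 0f a2 fetches b=0xa2 here and dispatches via
   g_apfnTwoByteMap[0xa2] to iemOp_cpuid; all 0x0f xx opcodes funnel through
   this single table lookup. */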
7641
7642/** Opcode 0x10. */
7643FNIEMOP_DEF(iemOp_adc_Eb_Gb)
7644{
7645 IEMOP_MNEMONIC(adc_Eb_Gb, "adc Eb,Gb");
7646 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
7647}
7648
7649
7650/** Opcode 0x11. */
7651FNIEMOP_DEF(iemOp_adc_Ev_Gv)
7652{
7653 IEMOP_MNEMONIC(adc_Ev_Gv, "adc Ev,Gv");
7654 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
7655}
7656
7657
7658/** Opcode 0x12. */
7659FNIEMOP_DEF(iemOp_adc_Gb_Eb)
7660{
7661 IEMOP_MNEMONIC(adc_Gb_Eb, "adc Gb,Eb");
7662 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
7663}
7664
7665
7666/** Opcode 0x13. */
7667FNIEMOP_DEF(iemOp_adc_Gv_Ev)
7668{
7669 IEMOP_MNEMONIC(adc_Gv_Ev, "adc Gv,Ev");
7670 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
7671}
7672
7673
7674/** Opcode 0x14. */
7675FNIEMOP_DEF(iemOp_adc_Al_Ib)
7676{
7677 IEMOP_MNEMONIC(adc_al_Ib, "adc al,Ib");
7678 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
7679}
7680
7681
7682/** Opcode 0x15. */
7683FNIEMOP_DEF(iemOp_adc_eAX_Iz)
7684{
7685 IEMOP_MNEMONIC(adc_rAX_Iz, "adc rAX,Iz");
7686 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
7687}
7688
7689
7690/** Opcode 0x16. */
7691FNIEMOP_DEF(iemOp_push_SS)
7692{
7693 IEMOP_MNEMONIC(push_ss, "push ss");
7694 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
7695}
7696
7697
7698/** Opcode 0x17. */
7699FNIEMOP_DEF(iemOp_pop_SS)
7700{
7701 IEMOP_MNEMONIC(pop_ss, "pop ss"); /** @todo implies instruction fusing? */
7702 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7703 IEMOP_HLP_NO_64BIT();
7704 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
7705}
7706
7707
7708/** Opcode 0x18. */
7709FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
7710{
7711 IEMOP_MNEMONIC(sbb_Eb_Gb, "sbb Eb,Gb");
7712 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
7713}
7714
7715
7716/** Opcode 0x19. */
7717FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
7718{
7719 IEMOP_MNEMONIC(sbb_Ev_Gv, "sbb Ev,Gv");
7720 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
7721}
7722
7723
7724/** Opcode 0x1a. */
7725FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
7726{
7727 IEMOP_MNEMONIC(sbb_Gb_Eb, "sbb Gb,Eb");
7728 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
7729}
7730
7731
7732/** Opcode 0x1b. */
7733FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
7734{
7735 IEMOP_MNEMONIC(sbb_Gv_Ev, "sbb Gv,Ev");
7736 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
7737}
7738
7739
7740/** Opcode 0x1c. */
7741FNIEMOP_DEF(iemOp_sbb_Al_Ib)
7742{
7743 IEMOP_MNEMONIC(sbb_al_Ib, "sbb al,Ib");
7744 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
7745}
7746
7747
7748/** Opcode 0x1d. */
7749FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
7750{
7751 IEMOP_MNEMONIC(sbb_rAX_Iz, "sbb rAX,Iz");
7752 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
7753}
7754
7755
7756/** Opcode 0x1e. */
7757FNIEMOP_DEF(iemOp_push_DS)
7758{
7759 IEMOP_MNEMONIC(push_ds, "push ds");
7760 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
7761}
7762
7763
7764/** Opcode 0x1f. */
7765FNIEMOP_DEF(iemOp_pop_DS)
7766{
7767 IEMOP_MNEMONIC(pop_ds, "pop ds");
7768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7769 IEMOP_HLP_NO_64BIT();
7770 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
7771}
7772
7773
7774/** Opcode 0x20. */
7775FNIEMOP_DEF(iemOp_and_Eb_Gb)
7776{
7777 IEMOP_MNEMONIC(and_Eb_Gb, "and Eb,Gb");
7778 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7779 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
7780}
7781
7782
7783/** Opcode 0x21. */
7784FNIEMOP_DEF(iemOp_and_Ev_Gv)
7785{
7786 IEMOP_MNEMONIC(and_Ev_Gv, "and Ev,Gv");
7787 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7788 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
7789}
7790
7791
7792/** Opcode 0x22. */
7793FNIEMOP_DEF(iemOp_and_Gb_Eb)
7794{
7795 IEMOP_MNEMONIC(and_Gb_Eb, "and Gb,Eb");
7796 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7797 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
7798}
7799
7800
7801/** Opcode 0x23. */
7802FNIEMOP_DEF(iemOp_and_Gv_Ev)
7803{
7804 IEMOP_MNEMONIC(and_Gv_Ev, "and Gv,Ev");
7805 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7806 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
7807}
7808
7809
7810/** Opcode 0x24. */
7811FNIEMOP_DEF(iemOp_and_Al_Ib)
7812{
7813 IEMOP_MNEMONIC(and_al_Ib, "and al,Ib");
7814 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7815 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
7816}
7817
7818
7819/** Opcode 0x25. */
7820FNIEMOP_DEF(iemOp_and_eAX_Iz)
7821{
7822 IEMOP_MNEMONIC(and_rAX_Iz, "and rAX,Iz");
7823 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7824 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
7825}
7826
7827
7828/** Opcode 0x26. */
7829FNIEMOP_DEF(iemOp_seg_ES)
7830{
7831 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
7832 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
7833 pVCpu->iem.s.iEffSeg = X86_SREG_ES;
7834
7835 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7836 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7837}
7838
7839
7840/** Opcode 0x27. */
7841FNIEMOP_DEF(iemOp_daa)
7842{
7843 IEMOP_MNEMONIC(daa_AL, "daa AL");
7844 IEMOP_HLP_NO_64BIT();
7845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7846 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
7847 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
7848}
7849
7850
7851/** Opcode 0x28. */
7852FNIEMOP_DEF(iemOp_sub_Eb_Gb)
7853{
7854 IEMOP_MNEMONIC(sub_Eb_Gb, "sub Eb,Gb");
7855 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
7856}
7857
7858
7859/** Opcode 0x29. */
7860FNIEMOP_DEF(iemOp_sub_Ev_Gv)
7861{
7862 IEMOP_MNEMONIC(sub_Ev_Gv, "sub Ev,Gv");
7863 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
7864}
7865
7866
7867/** Opcode 0x2a. */
7868FNIEMOP_DEF(iemOp_sub_Gb_Eb)
7869{
7870 IEMOP_MNEMONIC(sub_Gb_Eb, "sub Gb,Eb");
7871 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
7872}
7873
7874
7875/** Opcode 0x2b. */
7876FNIEMOP_DEF(iemOp_sub_Gv_Ev)
7877{
7878 IEMOP_MNEMONIC(sub_Gv_Ev, "sub Gv,Ev");
7879 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
7880}
7881
7882
7883/** Opcode 0x2c. */
7884FNIEMOP_DEF(iemOp_sub_Al_Ib)
7885{
7886 IEMOP_MNEMONIC(sub_al_Ib, "sub al,Ib");
7887 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
7888}
7889
7890
7891/** Opcode 0x2d. */
7892FNIEMOP_DEF(iemOp_sub_eAX_Iz)
7893{
7894 IEMOP_MNEMONIC(sub_rAX_Iz, "sub rAX,Iz");
7895 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
7896}
7897
7898
7899/** Opcode 0x2e. */
7900FNIEMOP_DEF(iemOp_seg_CS)
7901{
7902 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
7903 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
7904 pVCpu->iem.s.iEffSeg = X86_SREG_CS;
7905
7906 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7907 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7908}
7909
7910
7911/** Opcode 0x2f. */
7912FNIEMOP_DEF(iemOp_das)
7913{
7914 IEMOP_MNEMONIC(das_AL, "das AL");
7915 IEMOP_HLP_NO_64BIT();
7916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7917 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
7918 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
7919}
7920
7921
7922/** Opcode 0x30. */
7923FNIEMOP_DEF(iemOp_xor_Eb_Gb)
7924{
7925 IEMOP_MNEMONIC(xor_Eb_Gb, "xor Eb,Gb");
7926 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7927 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
7928}
7929
7930
7931/** Opcode 0x31. */
7932FNIEMOP_DEF(iemOp_xor_Ev_Gv)
7933{
7934 IEMOP_MNEMONIC(xor_Ev_Gv, "xor Ev,Gv");
7935 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7936 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
7937}
7938
7939
7940/** Opcode 0x32. */
7941FNIEMOP_DEF(iemOp_xor_Gb_Eb)
7942{
7943 IEMOP_MNEMONIC(xor_Gb_Eb, "xor Gb,Eb");
7944 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7945 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
7946}
7947
7948
7949/** Opcode 0x33. */
7950FNIEMOP_DEF(iemOp_xor_Gv_Ev)
7951{
7952 IEMOP_MNEMONIC(xor_Gv_Ev, "xor Gv,Ev");
7953 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7954 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
7955}
7956
7957
7958/** Opcode 0x34. */
7959FNIEMOP_DEF(iemOp_xor_Al_Ib)
7960{
7961 IEMOP_MNEMONIC(xor_al_Ib, "xor al,Ib");
7962 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7963 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
7964}
7965
7966
7967/** Opcode 0x35. */
7968FNIEMOP_DEF(iemOp_xor_eAX_Iz)
7969{
7970 IEMOP_MNEMONIC(xor_rAX_Iz, "xor rAX,Iz");
7971 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7972 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
7973}
7974
7975
7976/** Opcode 0x36. */
7977FNIEMOP_DEF(iemOp_seg_SS)
7978{
7979 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
7980 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
7981 pVCpu->iem.s.iEffSeg = X86_SREG_SS;
7982
7983 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7984 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7985}
7986
7987
7988/** Opcode 0x37. */
7989FNIEMOP_STUB(iemOp_aaa);
7990
7991
7992/** Opcode 0x38. */
7993FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
7994{
7995 IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
7996 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
7997}
7998
7999
8000/** Opcode 0x39. */
8001FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
8002{
8003 IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
8004 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
8005}
8006
8007
8008/** Opcode 0x3a. */
8009FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
8010{
8011 IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
8012 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
8013}
8014
8015
8016/** Opcode 0x3b. */
8017FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
8018{
8019 IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
8020 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
8021}
8022
8023
8024/** Opcode 0x3c. */
8025FNIEMOP_DEF(iemOp_cmp_Al_Ib)
8026{
8027 IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
8028 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
8029}
8030
8031
8032/** Opcode 0x3d. */
8033FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
8034{
8035 IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
8036 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
8037}
8038
8039
8040/** Opcode 0x3e. */
8041FNIEMOP_DEF(iemOp_seg_DS)
8042{
8043 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
8044 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
8045 pVCpu->iem.s.iEffSeg = X86_SREG_DS;
8046
8047 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8048 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8049}
8050
8051
8052/** Opcode 0x3f. */
8053FNIEMOP_STUB(iemOp_aas);
8054
8055/**
8056 * Common 'inc/dec/not/neg register' helper.
8057 */
8058FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
8059{
8060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8061 switch (pVCpu->iem.s.enmEffOpSize)
8062 {
8063 case IEMMODE_16BIT:
8064 IEM_MC_BEGIN(2, 0);
8065 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8066 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8067 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
8068 IEM_MC_REF_EFLAGS(pEFlags);
8069 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
8070 IEM_MC_ADVANCE_RIP();
8071 IEM_MC_END();
8072 return VINF_SUCCESS;
8073
8074 case IEMMODE_32BIT:
8075 IEM_MC_BEGIN(2, 0);
8076 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8077 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8078 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8079 IEM_MC_REF_EFLAGS(pEFlags);
8080 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
8081 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8082 IEM_MC_ADVANCE_RIP();
8083 IEM_MC_END();
8084 return VINF_SUCCESS;
8085
8086 case IEMMODE_64BIT:
8087 IEM_MC_BEGIN(2, 0);
8088 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8089 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8090 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8091 IEM_MC_REF_EFLAGS(pEFlags);
8092 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
8093 IEM_MC_ADVANCE_RIP();
8094 IEM_MC_END();
8095 return VINF_SUCCESS;
8096 }
8097 return VINF_SUCCESS;
8098}
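
/* Note! Only the 32-bit case clears the high dword (via
   IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF), mirroring the architectural rule that
   32-bit GPR writes zero bits 63:32 while 8/16-bit writes leave them be.
   E.g. in 64-bit mode ff c0 (inc eax) zeroes rax[63:32]. */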
8099
8100
8101/** Opcode 0x40. */
8102FNIEMOP_DEF(iemOp_inc_eAX)
8103{
8104 /*
8105 * This is a REX prefix in 64-bit mode.
8106 */
8107 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8108 {
8109 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
8110 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;
8111
8112 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8113 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8114 }
8115
8116 IEMOP_MNEMONIC(inc_eAX, "inc eAX");
8117 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
8118}
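
/* In 64-bit mode 0x40..0x4f are REX prefixes, so the decoder records the
   REX bits and restarts on the next byte; e.g. 41 50 first lands in
   iemOp_inc_eCX (0x41 = REX.B), sets uRexB, and then re-dispatches 0x50 as
   push r8 rather than inc ecx / push rax. */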
8119
8120
8121/** Opcode 0x41. */
8122FNIEMOP_DEF(iemOp_inc_eCX)
8123{
8124 /*
8125 * This is a REX prefix in 64-bit mode.
8126 */
8127 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8128 {
8129 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
8130 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
8131 pVCpu->iem.s.uRexB = 1 << 3;
8132
8133 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8134 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8135 }
8136
8137 IEMOP_MNEMONIC(inc_eCX, "inc eCX");
8138 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
8139}
8140
8141
8142/** Opcode 0x42. */
8143FNIEMOP_DEF(iemOp_inc_eDX)
8144{
8145 /*
8146 * This is a REX prefix in 64-bit mode.
8147 */
8148 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8149 {
8150 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
8151 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
8152 pVCpu->iem.s.uRexIndex = 1 << 3;
8153
8154 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8155 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8156 }
8157
8158 IEMOP_MNEMONIC(inc_eDX, "inc eDX");
8159 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
8160}
8161
8162
8163
8164/** Opcode 0x43. */
8165FNIEMOP_DEF(iemOp_inc_eBX)
8166{
8167 /*
8168 * This is a REX prefix in 64-bit mode.
8169 */
8170 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8171 {
8172 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
8173 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
8174 pVCpu->iem.s.uRexB = 1 << 3;
8175 pVCpu->iem.s.uRexIndex = 1 << 3;
8176
8177 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8178 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8179 }
8180
8181 IEMOP_MNEMONIC(inc_eBX, "inc eBX");
8182 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
8183}
8184
8185
8186/** Opcode 0x44. */
8187FNIEMOP_DEF(iemOp_inc_eSP)
8188{
8189 /*
8190 * This is a REX prefix in 64-bit mode.
8191 */
8192 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8193 {
8194 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
8195 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
8196 pVCpu->iem.s.uRexReg = 1 << 3;
8197
8198 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8199 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8200 }
8201
8202 IEMOP_MNEMONIC(inc_eSP, "inc eSP");
8203 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
8204}
8205
8206
8207/** Opcode 0x45. */
8208FNIEMOP_DEF(iemOp_inc_eBP)
8209{
8210 /*
8211 * This is a REX prefix in 64-bit mode.
8212 */
8213 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8214 {
8215 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
8216 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
8217 pVCpu->iem.s.uRexReg = 1 << 3;
8218 pVCpu->iem.s.uRexB = 1 << 3;
8219
8220 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8221 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8222 }
8223
8224 IEMOP_MNEMONIC(inc_eBP, "inc eBP");
8225 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
8226}
8227
8228
8229/** Opcode 0x46. */
8230FNIEMOP_DEF(iemOp_inc_eSI)
8231{
8232 /*
8233 * This is a REX prefix in 64-bit mode.
8234 */
8235 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8236 {
8237 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
8238 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
8239 pVCpu->iem.s.uRexReg = 1 << 3;
8240 pVCpu->iem.s.uRexIndex = 1 << 3;
8241
8242 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8243 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8244 }
8245
8246 IEMOP_MNEMONIC(inc_eSI, "inc eSI");
8247 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
8248}
8249
8250
8251/** Opcode 0x47. */
8252FNIEMOP_DEF(iemOp_inc_eDI)
8253{
8254 /*
8255 * This is a REX prefix in 64-bit mode.
8256 */
8257 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8258 {
8259 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
8260 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
8261 pVCpu->iem.s.uRexReg = 1 << 3;
8262 pVCpu->iem.s.uRexB = 1 << 3;
8263 pVCpu->iem.s.uRexIndex = 1 << 3;
8264
8265 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8266 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8267 }
8268
8269 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
8270 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
8271}
8272
8273
8274/** Opcode 0x48. */
8275FNIEMOP_DEF(iemOp_dec_eAX)
8276{
8277 /*
8278 * This is a REX prefix in 64-bit mode.
8279 */
8280 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8281 {
8282 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
8283 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
8284 iemRecalEffOpSize(pVCpu);
8285
8286 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8287 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8288 }
8289
8290 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
8291 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
8292}
8293
8294
8295/** Opcode 0x49. */
8296FNIEMOP_DEF(iemOp_dec_eCX)
8297{
8298 /*
8299 * This is a REX prefix in 64-bit mode.
8300 */
8301 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8302 {
8303 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
8304 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
8305 pVCpu->iem.s.uRexB = 1 << 3;
8306 iemRecalEffOpSize(pVCpu);
8307
8308 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8309 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8310 }
8311
8312 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
8313 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
8314}
8315
8316
8317/** Opcode 0x4a. */
8318FNIEMOP_DEF(iemOp_dec_eDX)
8319{
8320 /*
8321 * This is a REX prefix in 64-bit mode.
8322 */
8323 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8324 {
8325 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
8326 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8327 pVCpu->iem.s.uRexIndex = 1 << 3;
8328 iemRecalEffOpSize(pVCpu);
8329
8330 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8331 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8332 }
8333
8334 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
8335 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
8336}
8337
8338
8339/** Opcode 0x4b. */
8340FNIEMOP_DEF(iemOp_dec_eBX)
8341{
8342 /*
8343 * This is a REX prefix in 64-bit mode.
8344 */
8345 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8346 {
8347 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
8348 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8349 pVCpu->iem.s.uRexB = 1 << 3;
8350 pVCpu->iem.s.uRexIndex = 1 << 3;
8351 iemRecalEffOpSize(pVCpu);
8352
8353 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8354 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8355 }
8356
8357 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
8358 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
8359}
8360
8361
8362/** Opcode 0x4c. */
8363FNIEMOP_DEF(iemOp_dec_eSP)
8364{
8365 /*
8366 * This is a REX prefix in 64-bit mode.
8367 */
8368 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8369 {
8370 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
8371 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
8372 pVCpu->iem.s.uRexReg = 1 << 3;
8373 iemRecalEffOpSize(pVCpu);
8374
8375 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8376 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8377 }
8378
8379 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
8380 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
8381}
8382
8383
8384/** Opcode 0x4d. */
8385FNIEMOP_DEF(iemOp_dec_eBP)
8386{
8387 /*
8388 * This is a REX prefix in 64-bit mode.
8389 */
8390 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8391 {
8392 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
8393 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
8394 pVCpu->iem.s.uRexReg = 1 << 3;
8395 pVCpu->iem.s.uRexB = 1 << 3;
8396 iemRecalEffOpSize(pVCpu);
8397
8398 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8399 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8400 }
8401
8402 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
8403 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
8404}
8405
8406
8407/** Opcode 0x4e. */
8408FNIEMOP_DEF(iemOp_dec_eSI)
8409{
8410 /*
8411 * This is a REX prefix in 64-bit mode.
8412 */
8413 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8414 {
8415 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
8416 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8417 pVCpu->iem.s.uRexReg = 1 << 3;
8418 pVCpu->iem.s.uRexIndex = 1 << 3;
8419 iemRecalEffOpSize(pVCpu);
8420
8421 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8422 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8423 }
8424
8425 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
8426 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
8427}
8428
8429
8430/** Opcode 0x4f. */
8431FNIEMOP_DEF(iemOp_dec_eDI)
8432{
8433 /*
8434 * This is a REX prefix in 64-bit mode.
8435 */
8436 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8437 {
8438 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
8439 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8440 pVCpu->iem.s.uRexReg = 1 << 3;
8441 pVCpu->iem.s.uRexB = 1 << 3;
8442 pVCpu->iem.s.uRexIndex = 1 << 3;
8443 iemRecalEffOpSize(pVCpu);
8444
8445 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8446 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8447 }
8448
8449 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
8450 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
8451}
8452
8453
8454/**
8455 * Common 'push register' helper.
8456 */
8457FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
8458{
8459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8460 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8461 {
8462 iReg |= pVCpu->iem.s.uRexB;
8463 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
8464 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8465 }
8466
8467 switch (pVCpu->iem.s.enmEffOpSize)
8468 {
8469 case IEMMODE_16BIT:
8470 IEM_MC_BEGIN(0, 1);
8471 IEM_MC_LOCAL(uint16_t, u16Value);
8472 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
8473 IEM_MC_PUSH_U16(u16Value);
8474 IEM_MC_ADVANCE_RIP();
8475 IEM_MC_END();
8476 break;
8477
8478 case IEMMODE_32BIT:
8479 IEM_MC_BEGIN(0, 1);
8480 IEM_MC_LOCAL(uint32_t, u32Value);
8481 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
8482 IEM_MC_PUSH_U32(u32Value);
8483 IEM_MC_ADVANCE_RIP();
8484 IEM_MC_END();
8485 break;
8486
8487 case IEMMODE_64BIT:
8488 IEM_MC_BEGIN(0, 1);
8489 IEM_MC_LOCAL(uint64_t, u64Value);
8490 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
8491 IEM_MC_PUSH_U64(u64Value);
8492 IEM_MC_ADVANCE_RIP();
8493 IEM_MC_END();
8494 break;
8495 }
8496
8497 return VINF_SUCCESS;
8498}
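
/* Note! Pushes default to a 64-bit operand size in long mode (no REX.W
   needed) and 0x66 selects a 16-bit push; a 32-bit push is not encodable
   there, which is why the 64-bit branch above only picks between
   IEMMODE_64BIT and IEMMODE_16BIT. */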
8499
8500
8501/** Opcode 0x50. */
8502FNIEMOP_DEF(iemOp_push_eAX)
8503{
8504 IEMOP_MNEMONIC(push_rAX, "push rAX");
8505 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
8506}
8507
8508
8509/** Opcode 0x51. */
8510FNIEMOP_DEF(iemOp_push_eCX)
8511{
8512 IEMOP_MNEMONIC(push_rCX, "push rCX");
8513 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
8514}
8515
8516
8517/** Opcode 0x52. */
8518FNIEMOP_DEF(iemOp_push_eDX)
8519{
8520 IEMOP_MNEMONIC(push_rDX, "push rDX");
8521 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
8522}
8523
8524
8525/** Opcode 0x53. */
8526FNIEMOP_DEF(iemOp_push_eBX)
8527{
8528 IEMOP_MNEMONIC(push_rBX, "push rBX");
8529 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
8530}
8531
8532
8533/** Opcode 0x54. */
8534FNIEMOP_DEF(iemOp_push_eSP)
8535{
8536 IEMOP_MNEMONIC(push_rSP, "push rSP");
8537 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
8538 {
8539 IEM_MC_BEGIN(0, 1);
8540 IEM_MC_LOCAL(uint16_t, u16Value);
8541 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
8542 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
8543 IEM_MC_PUSH_U16(u16Value);
8544 IEM_MC_ADVANCE_RIP();
8545 IEM_MC_END();
 return VINF_SUCCESS; /* don't fall through, or rSP would be pushed a second time below */
8546 }
8547 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
8548}
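
/* The 8086/8088 special case above models the old quirk where push sp
   stores the value of SP after the decrement (hence the SUB_LOCAL_U16 by 2),
   whereas the 286 and later push the pre-decrement value via the common
   path. */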
8549
8550
8551/** Opcode 0x55. */
8552FNIEMOP_DEF(iemOp_push_eBP)
8553{
8554 IEMOP_MNEMONIC(push_rBP, "push rBP");
8555 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
8556}
8557
8558
8559/** Opcode 0x56. */
8560FNIEMOP_DEF(iemOp_push_eSI)
8561{
8562 IEMOP_MNEMONIC(push_rSI, "push rSI");
8563 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
8564}
8565
8566
8567/** Opcode 0x57. */
8568FNIEMOP_DEF(iemOp_push_eDI)
8569{
8570 IEMOP_MNEMONIC(push_rDI, "push rDI");
8571 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
8572}
8573
8574
8575/**
8576 * Common 'pop register' helper.
8577 */
8578FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
8579{
8580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8581 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8582 {
8583 iReg |= pVCpu->iem.s.uRexB;
8584 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
8585 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8586 }
8587
8588 switch (pVCpu->iem.s.enmEffOpSize)
8589 {
8590 case IEMMODE_16BIT:
8591 IEM_MC_BEGIN(0, 1);
8592 IEM_MC_LOCAL(uint16_t *, pu16Dst);
8593 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
8594 IEM_MC_POP_U16(pu16Dst);
8595 IEM_MC_ADVANCE_RIP();
8596 IEM_MC_END();
8597 break;
8598
8599 case IEMMODE_32BIT:
8600 IEM_MC_BEGIN(0, 1);
8601 IEM_MC_LOCAL(uint32_t *, pu32Dst);
8602 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8603 IEM_MC_POP_U32(pu32Dst);
8604 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase */
8605 IEM_MC_ADVANCE_RIP();
8606 IEM_MC_END();
8607 break;
8608
8609 case IEMMODE_64BIT:
8610 IEM_MC_BEGIN(0, 1);
8611 IEM_MC_LOCAL(uint64_t *, pu64Dst);
8612 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8613 IEM_MC_POP_U64(pu64Dst);
8614 IEM_MC_ADVANCE_RIP();
8615 IEM_MC_END();
8616 break;
8617 }
8618
8619 return VINF_SUCCESS;
8620}
8621
8622
8623/** Opcode 0x58. */
8624FNIEMOP_DEF(iemOp_pop_eAX)
8625{
8626 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
8627 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
8628}
8629
8630
8631/** Opcode 0x59. */
8632FNIEMOP_DEF(iemOp_pop_eCX)
8633{
8634 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
8635 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
8636}
8637
8638
8639/** Opcode 0x5a. */
8640FNIEMOP_DEF(iemOp_pop_eDX)
8641{
8642 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
8643 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
8644}
8645
8646
8647/** Opcode 0x5b. */
8648FNIEMOP_DEF(iemOp_pop_eBX)
8649{
8650 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
8651 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
8652}
8653
8654
8655/** Opcode 0x5c. */
8656FNIEMOP_DEF(iemOp_pop_eSP)
8657{
8658 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
8659 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8660 {
8661 if (pVCpu->iem.s.uRexB)
8662 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
8663 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
8664 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8665 }
8666
8667 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
8668 DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
8669 /** @todo add testcase for this instruction. */
8670 switch (pVCpu->iem.s.enmEffOpSize)
8671 {
8672 case IEMMODE_16BIT:
8673 IEM_MC_BEGIN(0, 1);
8674 IEM_MC_LOCAL(uint16_t, u16Dst);
8675 IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
8676 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
8677 IEM_MC_ADVANCE_RIP();
8678 IEM_MC_END();
8679 break;
8680
8681 case IEMMODE_32BIT:
8682 IEM_MC_BEGIN(0, 1);
8683 IEM_MC_LOCAL(uint32_t, u32Dst);
8684 IEM_MC_POP_U32(&u32Dst);
8685 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
8686 IEM_MC_ADVANCE_RIP();
8687 IEM_MC_END();
8688 break;
8689
8690 case IEMMODE_64BIT:
8691 IEM_MC_BEGIN(0, 1);
8692 IEM_MC_LOCAL(uint64_t, u64Dst);
8693 IEM_MC_POP_U64(&u64Dst);
8694 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
8695 IEM_MC_ADVANCE_RIP();
8696 IEM_MC_END();
8697 break;
8698 }
8699
8700 return VINF_SUCCESS;
8701}
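
/* Note! pop rSP is special because the popped value must end up in SP last,
   overriding the stack pointer increment; hence the pop-into-local followed
   by an explicit store above instead of the common pop-by-reference
   helper. */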
8702
8703
8704/** Opcode 0x5d. */
8705FNIEMOP_DEF(iemOp_pop_eBP)
8706{
8707 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
8708 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
8709}
8710
8711
8712/** Opcode 0x5e. */
8713FNIEMOP_DEF(iemOp_pop_eSI)
8714{
8715 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
8716 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
8717}
8718
8719
8720/** Opcode 0x5f. */
8721FNIEMOP_DEF(iemOp_pop_eDI)
8722{
8723 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
8724 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
8725}
8726
8727
8728/** Opcode 0x60. */
8729FNIEMOP_DEF(iemOp_pusha)
8730{
8731 IEMOP_MNEMONIC(pusha, "pusha");
8732 IEMOP_HLP_MIN_186();
8733 IEMOP_HLP_NO_64BIT();
8734 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8735 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
8736 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
8737 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
8738}
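
/* For the record, pusha pushes in the order AX,CX,DX,BX, original SP, BP,
   SI,DI (or the 32-bit equivalents for pushad), and popa/popad pops the
   reverse order while discarding the stored SP value. */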


/** Opcode 0x61. */
FNIEMOP_DEF(iemOp_popa)
{
    IEMOP_MNEMONIC(popa, "popa");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
}


/** Opcode 0x62. */
FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
// IEMOP_HLP_MIN_186();


/** Opcode 0x63 - non-64-bit modes. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t,   u16Src,  1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t,   u16Src,  1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
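

/*
 * Illustrative sketch only of the semantics the iemAImpl_arpl assembly helper
 * called above is expected to implement: ARPL raises the RPL field (bits 0-1)
 * of the destination selector to at least the RPL of the source selector and
 * reports via ZF whether an adjustment was made.
 */
static int iemSketchArpl(uint16_t *puSelDst, uint16_t uSelSrc)
{
    if ((*puSelDst & 3) < (uSelSrc & 3)) /* RPL = selector bits 0-1 */
    {
        *puSelDst = (uint16_t)((*puSelDst & ~(uint16_t)3) | (uSelSrc & 3));
        return 1; /* ZF=1: RPL was raised. */
    }
    return 0;     /* ZF=0: destination left unchanged. */
}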


/** Opcode 0x63.
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
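

/*
 * Illustrative sketch only: with REX.W set, opcode 0x63 sign-extends the
 * 32-bit source into the 64-bit destination, which is all the
 * IEM_MC_FETCH_*_U32_SX_U64 blocks above ask the interpreter to do:
 */
static uint64_t iemSketchMovsxd(uint32_t uSrc)
{
    return (uint64_t)(int64_t)(int32_t)uSrc; /* 0x80000000 -> 0xffffffff80000000 */
}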


/** Opcode 0x64. */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
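

/*
 * Illustrative sketch only: prefix bytes like 0x64/0x65 just record state and
 * re-dispatch on the following byte, so any sequence of prefixes chains
 * naturally.  Toy loop version of what the recursive dispatch above achieves
 * (the real decoder tail-calls through g_apfnOneByteMap instead of looping):
 */
static uint8_t iemSketchSkipSegPrefixes(uint8_t const *pbInstr, uint32_t *pfPrefixes)
{
    for (;;)
    {
        uint8_t const b = *pbInstr++;
        if (b == 0x64)
            *pfPrefixes |= IEM_OP_PRF_SEG_FS;
        else if (b == 0x65)
            *pfPrefixes |= IEM_OP_PRF_SEG_GS;
        else
            return b; /* the first non-prefix byte is the opcode */
    }
}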


/** Opcode 0x65. */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x66. */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x67. */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
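

/*
 * Illustrative sketch only: the 0x67 prefix selects the address size that is
 * *not* the default for the current mode; in 64-bit mode it selects 32-bit
 * addressing since 16-bit addressing is unreachable there.  The switch above
 * is exactly this mapping:
 */
static IEMMODE iemSketchEffAddrMode(IEMMODE enmDefAddrMode)
{
    return enmDefAddrMode == IEMMODE_16BIT ? IEMMODE_32BIT
         : enmDefAddrMode == IEMMODE_32BIT ? IEMMODE_16BIT
         :                                   IEMMODE_32BIT; /* 64-bit default */
}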


/** Opcode 0x68. */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
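

/*
 * Illustrative sketch only: in 64-bit mode 'push imm32' stores eight bytes,
 * so the immediate is sign-extended first (that is what the
 * IEM_OPCODE_GET_NEXT_S32_SX_U64 fetch above does).  Standalone model of the
 * resulting stack write over a hypothetical qword-indexed stack image:
 */
static void iemSketchPushIz64(uint64_t *pauStack, uint64_t *puRsp, uint32_t uImm32)
{
    uint64_t const uValue = (uint64_t)(int64_t)(int32_t)uImm32; /* sign-extend */
    *puRsp -= sizeof(uint64_t);                  /* decrement before the store */
    pauStack[*puRsp / sizeof(uint64_t)] = uValue;
}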


/** Opcode 0x69. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
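

/*
 * Illustrative sketch only of what the iemAImpl_imul_two_u16 helper called
 * above is expected to compute: the truncated signed product, with CF and OF
 * signalling that the full product no longer fits the destination width
 * (SF/ZF/AF/PF are architecturally undefined, hence the
 * IEMOP_VERIFICATION_UNDEFINED_EFLAGS above).
 */
static int iemSketchImulTwoU16(uint16_t *puDst, uint16_t uSrc)
{
    int32_t const iFull = (int32_t)(int16_t)*puDst * (int16_t)uSrc; /* full 32-bit signed product */
    *puDst = (uint16_t)iFull;                                       /* truncated result */
    return iFull != (int32_t)(int16_t)iFull;                        /* nonzero -> set CF and OF */
}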


/** Opcode 0x6a. */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
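

/*
 * Illustrative sketch only: the Ib operand of 'push Ib' is signed, so it is
 * widened to the effective operand size by sign extension before the push;
 * that is why the same i8Imm local feeds all three IEM_MC_PUSH_* cases above.
 */
static uint32_t iemSketchPushIbWiden32(int8_t i8Imm)
{
    return (uint32_t)(int32_t)i8Imm; /* e.g. -1 (0xff) -> 0xffffffff */
}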


/** Opcode 0x6b. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}


/** Opcode 0x6c. */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
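

/*
 * Illustrative sketch only, with a hypothetical port-read callback: the shape
 * of the loop the iemCImpl_rep_ins_* workers deferred to above perform -
 * repeat xCX times, store the port data at ES:xDI and step xDI by the element
 * size, up or down according to EFLAGS.DF.  The real workers additionally
 * handle segment checks, paging and interruption, which this sketch omits.
 */
static void iemSketchRepInsb16(uint16_t *puCx, uint16_t *puDi, uint8_t *pbEsBase,
                               uint8_t (*pfnInU8)(uint16_t uPort), uint16_t uPort, bool fDf)
{
    while (*puCx != 0)
    {
        pbEsBase[*puDi] = pfnInU8(uPort);               /* one I/O port read per element */
        *puDi = (uint16_t)(*puDi + (fDf ? -1 : 1));     /* EFLAGS.DF picks the direction */
        *puCx -= 1;
    }
}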


/** Opcode 0x6d. */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x6e. */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x6f. */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x70. */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x71. */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x72. */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x73. */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x74. */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x75. */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x76. */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
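

/*
 * Illustrative sketch only: the unsigned Jcc conditions tested above reduce
 * to simple flag predicates; JBE/JNA takes the branch when either CF or ZF is
 * set, i.e. 'below or equal' after an unsigned compare.
 */
static bool iemSketchIsJbeTaken(uint32_t fEFlags)
{
    return (fEFlags & (X86_EFL_CF | X86_EFL_ZF)) != 0;
}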


/** Opcode 0x77. */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x78. */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x79. */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x7a. */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x7b. */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x7c. */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
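

/*
 * Illustrative sketch only: the signed conditions pair SF against OF; JL/JNGE
 * is taken when SF != OF, which is what IEM_MC_IF_EFL_BITS_NE above tests.
 */
static bool iemSketchIsJlTaken(uint32_t fEFlags)
{
    return !!(fEFlags & X86_EFL_SF) != !!(fEFlags & X86_EFL_OF);
}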


/** Opcode 0x7d. */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x7e. */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x7f. */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x80. */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Eb_Ib,  "or Eb,Ib");  break;
        case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        if (pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING();
        else
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
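

/*
 * Illustrative sketch only: for the group 1 opcodes the reg field of the
 * ModR/M byte is an operation selector rather than a register, which is what
 * the g_apIemImplGrp1 indexing above expresses.  Decoding the selector by
 * hand:
 */
static unsigned iemSketchGrp1Op(uint8_t bRm)
{
    return (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK; /* 0=ADD, 1=OR, ..., 7=CMP */
}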


/** Opcode 0x81. */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Iz,  "or Ev,Iz");  break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP, TEST */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP, TEST */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}


/** Opcode 0x82. */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}


/** Opcode 0x83. */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Ib,  "or Ev,Ib");  break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
    }
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
             to the 386 even if absent in the Intel reference manuals and some
             3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}


/** Opcode 0x84. */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}


/** Opcode 0x85. */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}


/** Opcode 0x86. */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
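

/*
 * Illustrative sketch only: XCHG with a memory operand is architecturally
 * locked whether or not a LOCK prefix is present, which is why the memory
 * path above maps the operand read/write without any lock-prefix check.  The
 * swap the iemAImpl_xchg_u8 helper performs (atomically, on real hardware) is
 * just:
 */
static void iemSketchXchgU8(uint8_t *pu8Mem, uint8_t *pu8Reg)
{
    uint8_t const u8Tmp = *pu8Mem;
    *pu8Mem = *pu8Reg;
    *pu8Reg = u8Tmp;
}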


/** Opcode 0x87. */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x88. */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x89. */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}


/** Opcode 0x8a. */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x8b. */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
10609
10610 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10611
10612 /*
10613 * If rm is denoting a register, no more instruction bytes.
10614 */
10615 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10616 {
10617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10618 switch (pVCpu->iem.s.enmEffOpSize)
10619 {
10620 case IEMMODE_16BIT:
10621 IEM_MC_BEGIN(0, 1);
10622 IEM_MC_LOCAL(uint16_t, u16Value);
10623 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10624 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
10625 IEM_MC_ADVANCE_RIP();
10626 IEM_MC_END();
10627 break;
10628
10629 case IEMMODE_32BIT:
10630 IEM_MC_BEGIN(0, 1);
10631 IEM_MC_LOCAL(uint32_t, u32Value);
10632 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10633 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
10634 IEM_MC_ADVANCE_RIP();
10635 IEM_MC_END();
10636 break;
10637
10638 case IEMMODE_64BIT:
10639 IEM_MC_BEGIN(0, 1);
10640 IEM_MC_LOCAL(uint64_t, u64Value);
10641 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10642 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
10643 IEM_MC_ADVANCE_RIP();
10644 IEM_MC_END();
10645 break;
10646 }
10647 }
10648 else
10649 {
10650 /*
10651 * We're loading a register from memory.
10652 */
10653 switch (pVCpu->iem.s.enmEffOpSize)
10654 {
10655 case IEMMODE_16BIT:
10656 IEM_MC_BEGIN(0, 2);
10657 IEM_MC_LOCAL(uint16_t, u16Value);
10658 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10659 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10661 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10662 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
10663 IEM_MC_ADVANCE_RIP();
10664 IEM_MC_END();
10665 break;
10666
10667 case IEMMODE_32BIT:
10668 IEM_MC_BEGIN(0, 2);
10669 IEM_MC_LOCAL(uint32_t, u32Value);
10670 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10671 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10673 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10674 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
10675 IEM_MC_ADVANCE_RIP();
10676 IEM_MC_END();
10677 break;
10678
10679 case IEMMODE_64BIT:
10680 IEM_MC_BEGIN(0, 2);
10681 IEM_MC_LOCAL(uint64_t, u64Value);
10682 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10683 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10685 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10686 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
10687 IEM_MC_ADVANCE_RIP();
10688 IEM_MC_END();
10689 break;
10690 }
10691 }
10692 return VINF_SUCCESS;
10693}
10694
10695
10696/** Opcode 0x63 - arpl Ew,Gw outside 64-bit mode; movsxd Gv,Ev (or plain mov Gv,Ev without REX.W) in 64-bit mode. Defined here so it can forward to iemOp_mov_Gv_Ev above. */
10697FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
10698{
10699 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
10700 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
10701 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10702 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
10703 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
10704}
10705
10706
10707/** Opcode 0x8c. */
10708FNIEMOP_DEF(iemOp_mov_Ev_Sw)
10709{
10710 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
10711
10712 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10713
10714 /*
10715 * Check that the source segment register exists. The REX.R prefix is ignored.
10716 */
10717 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
10718 if (iSegReg > X86_SREG_GS)
10719 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10720
10721 /*
10722 * If rm is denoting a register, no more instruction bytes.
10723 * In that case, the operand size is respected and the upper bits are
10724 * cleared (starting with some Pentium models).
10725 */
10726 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10727 {
10728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10729 switch (pVCpu->iem.s.enmEffOpSize)
10730 {
10731 case IEMMODE_16BIT:
10732 IEM_MC_BEGIN(0, 1);
10733 IEM_MC_LOCAL(uint16_t, u16Value);
10734 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
10735 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
10736 IEM_MC_ADVANCE_RIP();
10737 IEM_MC_END();
10738 break;
10739
10740 case IEMMODE_32BIT:
10741 IEM_MC_BEGIN(0, 1);
10742 IEM_MC_LOCAL(uint32_t, u32Value);
10743 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
10744 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
10745 IEM_MC_ADVANCE_RIP();
10746 IEM_MC_END();
10747 break;
10748
10749 case IEMMODE_64BIT:
10750 IEM_MC_BEGIN(0, 1);
10751 IEM_MC_LOCAL(uint64_t, u64Value);
10752 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
10753 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
10754 IEM_MC_ADVANCE_RIP();
10755 IEM_MC_END();
10756 break;
10757 }
10758 }
10759 else
10760 {
10761 /*
10762 * We're saving the register to memory. The access is word sized
10763 * regardless of operand size prefixes.
10764 */
10765#if 0 /* not necessary */
10766 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
10767#endif
10768 IEM_MC_BEGIN(0, 2);
10769 IEM_MC_LOCAL(uint16_t, u16Value);
10770 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10771 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10773 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
10774 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
10775 IEM_MC_ADVANCE_RIP();
10776 IEM_MC_END();
10777 }
10778 return VINF_SUCCESS;
10779}
10780
10781
10782
10783
10784/** Opcode 0x8d. */
10785FNIEMOP_DEF(iemOp_lea_Gv_M)
10786{
10787 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
10788 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10789 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10790 return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */
10791
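 /* Note: LEA performs no memory access; the calculated effective address
    itself is the result, truncated to the operand size where necessary. */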
10792 switch (pVCpu->iem.s.enmEffOpSize)
10793 {
10794 case IEMMODE_16BIT:
10795 IEM_MC_BEGIN(0, 2);
10796 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10797 IEM_MC_LOCAL(uint16_t, u16Cast);
10798 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10800 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
10801 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
10802 IEM_MC_ADVANCE_RIP();
10803 IEM_MC_END();
10804 return VINF_SUCCESS;
10805
10806 case IEMMODE_32BIT:
10807 IEM_MC_BEGIN(0, 2);
10808 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10809 IEM_MC_LOCAL(uint32_t, u32Cast);
10810 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10812 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
10813 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
10814 IEM_MC_ADVANCE_RIP();
10815 IEM_MC_END();
10816 return VINF_SUCCESS;
10817
10818 case IEMMODE_64BIT:
10819 IEM_MC_BEGIN(0, 1);
10820 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10821 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10823 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
10824 IEM_MC_ADVANCE_RIP();
10825 IEM_MC_END();
10826 return VINF_SUCCESS;
10827 }
10828 AssertFailedReturn(VERR_IEM_IPE_7);
10829}
10830
10831
10832/** Opcode 0x8e. */
10833FNIEMOP_DEF(iemOp_mov_Sw_Ev)
10834{
10835 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
10836
10837 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10838
10839 /*
10840 * The practical operand size is 16-bit.
10841 */
10842#if 0 /* not necessary */
10843 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
10844#endif
10845
10846 /*
10847 * Check that the destination register exists and can be used with this
10848 * instruction. The REX.R prefix is ignored.
10849 */
10850 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
10851 if ( iSegReg == X86_SREG_CS
10852 || iSegReg > X86_SREG_GS)
10853 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10854
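 /* Both forms below defer to iemCImpl_load_SReg, since loading a segment
    register involves descriptor table lookups and permission checks; the
    CImpl worker also takes care of advancing RIP, which is why there is no
    IEM_MC_ADVANCE_RIP in this function. */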
10855 /*
10856 * If rm is denoting a register, no more instruction bytes.
10857 */
10858 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10859 {
10860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10861 IEM_MC_BEGIN(2, 0);
10862 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
10863 IEM_MC_ARG(uint16_t, u16Value, 1);
10864 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10865 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
10866 IEM_MC_END();
10867 }
10868 else
10869 {
10870 /*
10871 * We're loading the register from memory. The access is word sized
10872 * regardless of operand size prefixes.
10873 */
10874 IEM_MC_BEGIN(2, 1);
10875 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
10876 IEM_MC_ARG(uint16_t, u16Value, 1);
10877 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10878 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10879 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10880 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10881 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
10882 IEM_MC_END();
10883 }
10884 return VINF_SUCCESS;
10885}
10886
10887
10888/** Opcode 0x8f /0. */
10889FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
10890{
10891 /* This bugger is rather annoying as it requires rSP to be updated before
10892 doing the effective address calculations. Will eventually require a
10893 split between the R/M+SIB decoding and the effective address
10894 calculation - which is something that is required for any attempt at
10895 reusing this code for a recompiler. It may also be good to have if we
10896 need to delay #UD exception caused by invalid lock prefixes.
10897
10898 For now, we'll do a mostly safe interpreter-only implementation here. */
10899 /** @todo What's the deal with the 'reg' field and pop Ev? Ignoring it for
10900 * now until tests show it's checked... */
10901 IEMOP_MNEMONIC(pop_Ev, "pop Ev");
10902
10903 /* Register access is relatively easy and can share code. */
10904 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10905 return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10906
10907 /*
10908 * Memory target.
10909 *
10910 * Intel says that RSP is incremented before it's used in any effective
10911 * address calculations. This means some serious extra annoyance here since
10912 * we decode and calculate the effective address in one step and like to
10913 * delay committing registers till everything is done.
10914 *
10915 * So, we'll decode and calculate the effective address twice. This will
10916 * require some recoding if turned into a recompiler.
10917 */
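 /* Example of the Intel semantics implemented here, assuming 64-bit code:
    for 'pop qword [rsp]' the value is read from the old RSP, while the store
    address is calculated with the already incremented RSP (old RSP + 8). */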
10918 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */
10919
10920#ifndef TST_IEM_CHECK_MC
10921 /* Calc effective address with modified ESP. */
10922/** @todo testcase */
10923 PCPUMCTX pCtx = IEM_GET_CTX(pVCpu);
10924 RTGCPTR GCPtrEff;
10925 VBOXSTRICTRC rcStrict;
10926 switch (pVCpu->iem.s.enmEffOpSize)
10927 {
10928 case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
10929 case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
10930 case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
10931 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10932 }
10933 if (rcStrict != VINF_SUCCESS)
10934 return rcStrict;
10935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10936
10937 /* Perform the operation - this should be CImpl. */
10938 RTUINT64U TmpRsp;
10939 TmpRsp.u = pCtx->rsp;
10940 switch (pVCpu->iem.s.enmEffOpSize)
10941 {
10942 case IEMMODE_16BIT:
10943 {
10944 uint16_t u16Value;
10945 rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
10946 if (rcStrict == VINF_SUCCESS)
10947 rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
10948 break;
10949 }
10950
10951 case IEMMODE_32BIT:
10952 {
10953 uint32_t u32Value;
10954 rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
10955 if (rcStrict == VINF_SUCCESS)
10956 rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
10957 break;
10958 }
10959
10960 case IEMMODE_64BIT:
10961 {
10962 uint64_t u64Value;
10963 rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
10964 if (rcStrict == VINF_SUCCESS)
10965 rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
10966 break;
10967 }
10968
10969 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10970 }
10971 if (rcStrict == VINF_SUCCESS)
10972 {
10973 pCtx->rsp = TmpRsp.u;
10974 iemRegUpdateRipAndClearRF(pVCpu);
10975 }
10976 return rcStrict;
10977
10978#else
10979 return VERR_IEM_IPE_2;
10980#endif
10981}
10982
10983
10984/** Opcode 0x8f. */
10985FNIEMOP_DEF(iemOp_Grp1A)
10986{
10987 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10988 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
10989 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
10990
10991 /* AMD has defined /1 thru /7 as XOP prefix (similar to three-byte VEX). */
10992 /** @todo XOP decoding. */
10993 IEMOP_MNEMONIC(xop_amd, "3-byte-xop");
10994 return IEMOP_RAISE_INVALID_OPCODE();
10995}
10996
10997
10998/**
10999 * Common 'xchg reg,rAX' helper.
11000 */
11001FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
11002{
11003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11004
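 /* uRexB extends the 3-bit register index to R8-R15 when a REX.B prefix is
    present; see iemOp_nop below for how 'xchg r8,rAX' (REX.B 0x90) gets here. */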
11005 iReg |= pVCpu->iem.s.uRexB;
11006 switch (pVCpu->iem.s.enmEffOpSize)
11007 {
11008 case IEMMODE_16BIT:
11009 IEM_MC_BEGIN(0, 2);
11010 IEM_MC_LOCAL(uint16_t, u16Tmp1);
11011 IEM_MC_LOCAL(uint16_t, u16Tmp2);
11012 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
11013 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
11014 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
11015 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
11016 IEM_MC_ADVANCE_RIP();
11017 IEM_MC_END();
11018 return VINF_SUCCESS;
11019
11020 case IEMMODE_32BIT:
11021 IEM_MC_BEGIN(0, 2);
11022 IEM_MC_LOCAL(uint32_t, u32Tmp1);
11023 IEM_MC_LOCAL(uint32_t, u32Tmp2);
11024 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
11025 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
11026 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
11027 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
11028 IEM_MC_ADVANCE_RIP();
11029 IEM_MC_END();
11030 return VINF_SUCCESS;
11031
11032 case IEMMODE_64BIT:
11033 IEM_MC_BEGIN(0, 2);
11034 IEM_MC_LOCAL(uint64_t, u64Tmp1);
11035 IEM_MC_LOCAL(uint64_t, u64Tmp2);
11036 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
11037 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
11038 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
11039 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
11040 IEM_MC_ADVANCE_RIP();
11041 IEM_MC_END();
11042 return VINF_SUCCESS;
11043
11044 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11045 }
11046}
11047
11048
11049/** Opcode 0x90. */
11050FNIEMOP_DEF(iemOp_nop)
11051{
11052 /* R8/R8D and RAX/EAX can be exchanged. */
11053 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
11054 {
11055 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
11056 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
11057 }
11058
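 /* 0xf3 0x90 is the PAUSE spin-wait hint; apart from the mnemonic/stats it
    is handled exactly like NOP here. */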
11059 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
11060 IEMOP_MNEMONIC(pause, "pause");
11061 else
11062 IEMOP_MNEMONIC(nop, "nop");
11063 IEM_MC_BEGIN(0, 0);
11064 IEM_MC_ADVANCE_RIP();
11065 IEM_MC_END();
11066 return VINF_SUCCESS;
11067}
11068
11069
11070/** Opcode 0x91. */
11071FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
11072{
11073 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
11074 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
11075}
11076
11077
11078/** Opcode 0x92. */
11079FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
11080{
11081 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
11082 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
11083}
11084
11085
11086/** Opcode 0x93. */
11087FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
11088{
11089 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
11090 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
11091}
11092
11093
11094/** Opcode 0x94. */
11095FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
11096{
11097 IEMOP_MNEMONIC(xchg_rSP_rAX, "xchg rSP,rAX");
11098 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
11099}
11100
11101
11102/** Opcode 0x95. */
11103FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
11104{
11105 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
11106 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
11107}
11108
11109
11110/** Opcode 0x96. */
11111FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
11112{
11113 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
11114 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
11115}
11116
11117
11118/** Opcode 0x97. */
11119FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
11120{
11121 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
11122 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
11123}
11124
11125
11126/** Opcode 0x98. */
11127FNIEMOP_DEF(iemOp_cbw)
11128{
11129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
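 /* The sign extension is done branchily: when the sign bit of the source
    half is set the upper half is filled with ones, otherwise it is cleared. */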
11130 switch (pVCpu->iem.s.enmEffOpSize)
11131 {
11132 case IEMMODE_16BIT:
11133 IEMOP_MNEMONIC(cbw, "cbw");
11134 IEM_MC_BEGIN(0, 1);
11135 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
11136 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
11137 } IEM_MC_ELSE() {
11138 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
11139 } IEM_MC_ENDIF();
11140 IEM_MC_ADVANCE_RIP();
11141 IEM_MC_END();
11142 return VINF_SUCCESS;
11143
11144 case IEMMODE_32BIT:
11145 IEMOP_MNEMONIC(cwde, "cwde");
11146 IEM_MC_BEGIN(0, 1);
11147 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
11148 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
11149 } IEM_MC_ELSE() {
11150 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
11151 } IEM_MC_ENDIF();
11152 IEM_MC_ADVANCE_RIP();
11153 IEM_MC_END();
11154 return VINF_SUCCESS;
11155
11156 case IEMMODE_64BIT:
11157 IEMOP_MNEMONIC(cdqe, "cdqe");
11158 IEM_MC_BEGIN(0, 1);
11159 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
11160 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
11161 } IEM_MC_ELSE() {
11162 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
11163 } IEM_MC_ENDIF();
11164 IEM_MC_ADVANCE_RIP();
11165 IEM_MC_END();
11166 return VINF_SUCCESS;
11167
11168 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11169 }
11170}
11171
11172
11173/** Opcode 0x99. */
11174FNIEMOP_DEF(iemOp_cwd)
11175{
11176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
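 /* Here rDX simply becomes all ones or all zeroes, depending on the sign
    bit of rAX. */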
11177 switch (pVCpu->iem.s.enmEffOpSize)
11178 {
11179 case IEMMODE_16BIT:
11180 IEMOP_MNEMONIC(cwd, "cwd");
11181 IEM_MC_BEGIN(0, 1);
11182 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
11183 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
11184 } IEM_MC_ELSE() {
11185 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
11186 } IEM_MC_ENDIF();
11187 IEM_MC_ADVANCE_RIP();
11188 IEM_MC_END();
11189 return VINF_SUCCESS;
11190
11191 case IEMMODE_32BIT:
11192 IEMOP_MNEMONIC(cdq, "cdq");
11193 IEM_MC_BEGIN(0, 1);
11194 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
11195 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
11196 } IEM_MC_ELSE() {
11197 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
11198 } IEM_MC_ENDIF();
11199 IEM_MC_ADVANCE_RIP();
11200 IEM_MC_END();
11201 return VINF_SUCCESS;
11202
11203 case IEMMODE_64BIT:
11204 IEMOP_MNEMONIC(cqo, "cqo");
11205 IEM_MC_BEGIN(0, 1);
11206 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
11207 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
11208 } IEM_MC_ELSE() {
11209 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
11210 } IEM_MC_ENDIF();
11211 IEM_MC_ADVANCE_RIP();
11212 IEM_MC_END();
11213 return VINF_SUCCESS;
11214
11215 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11216 }
11217}
11218
11219
11220/** Opcode 0x9a. */
11221FNIEMOP_DEF(iemOp_call_Ap)
11222{
11223 IEMOP_MNEMONIC(call_Ap, "call Ap");
11224 IEMOP_HLP_NO_64BIT();
11225
11226 /* Decode the far pointer address and pass it on to the far call C implementation. */
11227 uint32_t offSeg;
11228 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
11229 IEM_OPCODE_GET_NEXT_U32(&offSeg);
11230 else
11231 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
11232 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
11233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11234 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
11235}
11236
11237
11238/** Opcode 0x9b. (aka fwait) */
11239FNIEMOP_DEF(iemOp_wait)
11240{
11241 IEMOP_MNEMONIC(wait, "wait");
11242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11243
11244 IEM_MC_BEGIN(0, 0);
11245 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11246 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11247 IEM_MC_ADVANCE_RIP();
11248 IEM_MC_END();
11249 return VINF_SUCCESS;
11250}
11251
11252
11253/** Opcode 0x9c. */
11254FNIEMOP_DEF(iemOp_pushf_Fv)
11255{
11256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11257 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11258 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
11259}
11260
11261
11262/** Opcode 0x9d. */
11263FNIEMOP_DEF(iemOp_popf_Fv)
11264{
11265 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11266 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11267 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
11268}
11269
11270
11271/** Opcode 0x9e. */
11272FNIEMOP_DEF(iemOp_sahf)
11273{
11274 IEMOP_MNEMONIC(sahf, "sahf");
11275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11276 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
11277 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
11278 return IEMOP_RAISE_INVALID_OPCODE();
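 /* Only SF, ZF, AF, PF and CF are taken from AH; bit 1 is forced to one and
    the remaining EFLAGS bits (bit 8 and up) are preserved. */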
11279 IEM_MC_BEGIN(0, 2);
11280 IEM_MC_LOCAL(uint32_t, u32Flags);
11281 IEM_MC_LOCAL(uint32_t, EFlags);
11282 IEM_MC_FETCH_EFLAGS(EFlags);
11283 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
11284 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11285 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
11286 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
11287 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
11288 IEM_MC_COMMIT_EFLAGS(EFlags);
11289 IEM_MC_ADVANCE_RIP();
11290 IEM_MC_END();
11291 return VINF_SUCCESS;
11292}
11293
11294
11295/** Opcode 0x9f. */
11296FNIEMOP_DEF(iemOp_lahf)
11297{
11298 IEMOP_MNEMONIC(lahf, "lahf");
11299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11300 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
11301 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
11302 return IEMOP_RAISE_INVALID_OPCODE();
11303 IEM_MC_BEGIN(0, 1);
11304 IEM_MC_LOCAL(uint8_t, u8Flags);
11305 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
11306 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
11307 IEM_MC_ADVANCE_RIP();
11308 IEM_MC_END();
11309 return VINF_SUCCESS;
11310}
11311
11312
11313/**
11314 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
11315 * iemOp_mov_Ov_rAX to fetch the moffsXX part of the opcode and fend off lock
11316 * prefixes. Will return on failures.
11317 * @param a_GCPtrMemOff The variable to store the offset in.
11318 */
11319#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
11320 do \
11321 { \
11322 switch (pVCpu->iem.s.enmEffAddrMode) \
11323 { \
11324 case IEMMODE_16BIT: \
11325 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
11326 break; \
11327 case IEMMODE_32BIT: \
11328 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
11329 break; \
11330 case IEMMODE_64BIT: \
11331 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
11332 break; \
11333 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11334 } \
11335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11336 } while (0)
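/* Note that the moffs immediate is sized by the effective *address* size,
   not the operand size, which is why the switch above is on enmEffAddrMode. */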
11337
11338/** Opcode 0xa0. */
11339FNIEMOP_DEF(iemOp_mov_Al_Ob)
11340{
11341 /*
11342 * Get the offset and fend off lock prefixes.
11343 */
11344 RTGCPTR GCPtrMemOff;
11345 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11346
11347 /*
11348 * Fetch the byte from memory into AL.
11349 */
11350 IEM_MC_BEGIN(0, 1);
11351 IEM_MC_LOCAL(uint8_t, u8Tmp);
11352 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
11353 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
11354 IEM_MC_ADVANCE_RIP();
11355 IEM_MC_END();
11356 return VINF_SUCCESS;
11357}
11358
11359
11360/** Opcode 0xa1. */
11361FNIEMOP_DEF(iemOp_mov_rAX_Ov)
11362{
11363 /*
11364 * Get the offset and fend off lock prefixes.
11365 */
11366 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
11367 RTGCPTR GCPtrMemOff;
11368 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11369
11370 /*
11371 * Fetch the value from memory into rAX.
11372 */
11373 switch (pVCpu->iem.s.enmEffOpSize)
11374 {
11375 case IEMMODE_16BIT:
11376 IEM_MC_BEGIN(0, 1);
11377 IEM_MC_LOCAL(uint16_t, u16Tmp);
11378 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
11379 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
11380 IEM_MC_ADVANCE_RIP();
11381 IEM_MC_END();
11382 return VINF_SUCCESS;
11383
11384 case IEMMODE_32BIT:
11385 IEM_MC_BEGIN(0, 1);
11386 IEM_MC_LOCAL(uint32_t, u32Tmp);
11387 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
11388 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
11389 IEM_MC_ADVANCE_RIP();
11390 IEM_MC_END();
11391 return VINF_SUCCESS;
11392
11393 case IEMMODE_64BIT:
11394 IEM_MC_BEGIN(0, 1);
11395 IEM_MC_LOCAL(uint64_t, u64Tmp);
11396 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
11397 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
11398 IEM_MC_ADVANCE_RIP();
11399 IEM_MC_END();
11400 return VINF_SUCCESS;
11401
11402 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11403 }
11404}
11405
11406
11407/** Opcode 0xa2. */
11408FNIEMOP_DEF(iemOp_mov_Ob_AL)
11409{
11410 /*
11411 * Get the offset and fend off lock prefixes.
11412 */
11413 RTGCPTR GCPtrMemOff;
11414 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11415
11416 /*
11417 * Store AL.
11418 */
11419 IEM_MC_BEGIN(0, 1);
11420 IEM_MC_LOCAL(uint8_t, u8Tmp);
11421 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
11422 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
11423 IEM_MC_ADVANCE_RIP();
11424 IEM_MC_END();
11425 return VINF_SUCCESS;
11426}
11427
11428
11429/** Opcode 0xa3. */
11430FNIEMOP_DEF(iemOp_mov_Ov_rAX)
11431{
11432 /*
11433 * Get the offset and fend off lock prefixes.
11434 */
11435 RTGCPTR GCPtrMemOff;
11436 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11437
11438 /*
11439 * Store rAX.
11440 */
11441 switch (pVCpu->iem.s.enmEffOpSize)
11442 {
11443 case IEMMODE_16BIT:
11444 IEM_MC_BEGIN(0, 1);
11445 IEM_MC_LOCAL(uint16_t, u16Tmp);
11446 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
11447 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
11448 IEM_MC_ADVANCE_RIP();
11449 IEM_MC_END();
11450 return VINF_SUCCESS;
11451
11452 case IEMMODE_32BIT:
11453 IEM_MC_BEGIN(0, 1);
11454 IEM_MC_LOCAL(uint32_t, u32Tmp);
11455 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
11456 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
11457 IEM_MC_ADVANCE_RIP();
11458 IEM_MC_END();
11459 return VINF_SUCCESS;
11460
11461 case IEMMODE_64BIT:
11462 IEM_MC_BEGIN(0, 1);
11463 IEM_MC_LOCAL(uint64_t, u64Tmp);
11464 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
11465 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
11466 IEM_MC_ADVANCE_RIP();
11467 IEM_MC_END();
11468 return VINF_SUCCESS;
11469
11470 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11471 }
11472}
11473
11474/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv */
11475#define IEM_MOVS_CASE(ValBits, AddrBits) \
11476 IEM_MC_BEGIN(0, 2); \
11477 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
11478 IEM_MC_LOCAL(RTGCPTR, uAddr); \
11479 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
11480 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
11481 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
11482 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
11483 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
11484 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11485 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11486 } IEM_MC_ELSE() { \
11487 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11488 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11489 } IEM_MC_ENDIF(); \
11490 IEM_MC_ADVANCE_RIP(); \
11491 IEM_MC_END();
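/* The above expands to one non-repeated MOVS iteration: load from
   iEffSeg:rSI (DS unless overridden), store to ES:rDI, then step both index
   registers by the element size, up or down according to EFLAGS.DF. */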
11492
11493/** Opcode 0xa4. */
11494FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
11495{
11496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11497
11498 /*
11499 * Use the C implementation if a repeat prefix is encountered.
11500 */
11501 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11502 {
11503 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
11504 switch (pVCpu->iem.s.enmEffAddrMode)
11505 {
11506 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
11507 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
11508 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
11509 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11510 }
11511 }
11512 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
11513
11514 /*
11515 * Sharing case implementation with movs[wdq] below.
11516 */
11517 switch (pVCpu->iem.s.enmEffAddrMode)
11518 {
11519 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
11520 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
11521 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
11522 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11523 }
11524 return VINF_SUCCESS;
11525}
11526
11527
11528/** Opcode 0xa5. */
11529FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
11530{
11531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11532
11533 /*
11534 * Use the C implementation if a repeat prefix is encountered.
11535 */
11536 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11537 {
11538 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
11539 switch (pVCpu->iem.s.enmEffOpSize)
11540 {
11541 case IEMMODE_16BIT:
11542 switch (pVCpu->iem.s.enmEffAddrMode)
11543 {
11544 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
11545 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
11546 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
11547 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11548 }
11549 break;
11550 case IEMMODE_32BIT:
11551 switch (pVCpu->iem.s.enmEffAddrMode)
11552 {
11553 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
11554 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
11555 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
11556 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11557 }
11558 case IEMMODE_64BIT:
11559 switch (pVCpu->iem.s.enmEffAddrMode)
11560 {
11561 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
11562 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
11563 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
11564 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11565 }
11566 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11567 }
11568 }
11569 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
11570
11571 /*
11572 * Annoying double switch here.
11573 * Using ugly macro for implementing the cases, sharing it with movsb.
11574 */
11575 switch (pVCpu->iem.s.enmEffOpSize)
11576 {
11577 case IEMMODE_16BIT:
11578 switch (pVCpu->iem.s.enmEffAddrMode)
11579 {
11580 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
11581 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
11582 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
11583 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11584 }
11585 break;
11586
11587 case IEMMODE_32BIT:
11588 switch (pVCpu->iem.s.enmEffAddrMode)
11589 {
11590 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
11591 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
11592 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
11593 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11594 }
11595 break;
11596
11597 case IEMMODE_64BIT:
11598 switch (pVCpu->iem.s.enmEffAddrMode)
11599 {
11600 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11601 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
11602 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
11603 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11604 }
11605 break;
11606 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11607 }
11608 return VINF_SUCCESS;
11609}
11610
11611#undef IEM_MOVS_CASE
11612
11613/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv */
11614#define IEM_CMPS_CASE(ValBits, AddrBits) \
11615 IEM_MC_BEGIN(3, 3); \
11616 IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
11617 IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
11618 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11619 IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
11620 IEM_MC_LOCAL(RTGCPTR, uAddr); \
11621 \
11622 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
11623 IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
11624 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
11625 IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
11626 IEM_MC_REF_LOCAL(puValue1, uValue1); \
11627 IEM_MC_REF_EFLAGS(pEFlags); \
11628 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
11629 \
11630 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
11631 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11632 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11633 } IEM_MC_ELSE() { \
11634 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11635 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11636 } IEM_MC_ENDIF(); \
11637 IEM_MC_ADVANCE_RIP(); \
11638 IEM_MC_END(); \
11639
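/* The above expands to one non-repeated CMPS iteration: the element at
   iEffSeg:rSI is compared with the one at ES:rDI purely for the EFLAGS
   effects, after which both index registers are stepped by EFLAGS.DF. */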
11640/** Opcode 0xa6. */
11641FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
11642{
11643 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11644
11645 /*
11646 * Use the C implementation if a repeat prefix is encountered.
11647 */
11648 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
11649 {
11650 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
11651 switch (pVCpu->iem.s.enmEffAddrMode)
11652 {
11653 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
11654 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
11655 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
11656 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11657 }
11658 }
11659 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
11660 {
11661 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
11662 switch (pVCpu->iem.s.enmEffAddrMode)
11663 {
11664 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
11665 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
11666 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
11667 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11668 }
11669 }
11670 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
11671
11672 /*
11673 * Sharing case implementation with cmps[wdq] below.
11674 */
11675 switch (pVCpu->iem.s.enmEffAddrMode)
11676 {
11677 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
11678 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
11679 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
11680 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11681 }
11682 return VINF_SUCCESS;
11684}
11685
11686
11687/** Opcode 0xa7. */
11688FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
11689{
11690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11691
11692 /*
11693 * Use the C implementation if a repeat prefix is encountered.
11694 */
11695 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
11696 {
11697 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
11698 switch (pVCpu->iem.s.enmEffOpSize)
11699 {
11700 case IEMMODE_16BIT:
11701 switch (pVCpu->iem.s.enmEffAddrMode)
11702 {
11703 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
11704 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
11705 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
11706 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11707 }
11708 break;
11709 case IEMMODE_32BIT:
11710 switch (pVCpu->iem.s.enmEffAddrMode)
11711 {
11712 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
11713 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
11714 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
11715 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11716 }
11717 case IEMMODE_64BIT:
11718 switch (pVCpu->iem.s.enmEffAddrMode)
11719 {
11720 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
11721 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
11722 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
11723 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11724 }
11725 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11726 }
11727 }
11728
11729 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
11730 {
11731 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
11732 switch (pVCpu->iem.s.enmEffOpSize)
11733 {
11734 case IEMMODE_16BIT:
11735 switch (pVCpu->iem.s.enmEffAddrMode)
11736 {
11737 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
11738 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
11739 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
11740 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11741 }
11742 break;
11743 case IEMMODE_32BIT:
11744 switch (pVCpu->iem.s.enmEffAddrMode)
11745 {
11746 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
11747 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
11748 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
11749 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11750 }
11751 case IEMMODE_64BIT:
11752 switch (pVCpu->iem.s.enmEffAddrMode)
11753 {
11754 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
11755 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
11756 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
11757 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11758 }
11759 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11760 }
11761 }
11762
11763 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
11764
11765 /*
11766 * Annoying double switch here.
11767 * Using ugly macro for implementing the cases, sharing it with cmpsb.
11768 */
11769 switch (pVCpu->iem.s.enmEffOpSize)
11770 {
11771 case IEMMODE_16BIT:
11772 switch (pVCpu->iem.s.enmEffAddrMode)
11773 {
11774 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
11775 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
11776 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
11777 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11778 }
11779 break;
11780
11781 case IEMMODE_32BIT:
11782 switch (pVCpu->iem.s.enmEffAddrMode)
11783 {
11784 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
11785 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
11786 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
11787 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11788 }
11789 break;
11790
11791 case IEMMODE_64BIT:
11792 switch (pVCpu->iem.s.enmEffAddrMode)
11793 {
11794 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11795 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
11796 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
11797 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11798 }
11799 break;
11800 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11801 }
11802 return VINF_SUCCESS;
11804}
11805
11806#undef IEM_CMPS_CASE
11807
11808/** Opcode 0xa8. */
11809FNIEMOP_DEF(iemOp_test_AL_Ib)
11810{
11811 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
11812 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
11813 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
11814}
11815
11816
11817/** Opcode 0xa9. */
11818FNIEMOP_DEF(iemOp_test_eAX_Iz)
11819{
11820 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
11821 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
11822 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
11823}
11824
11825
11826/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX */
11827#define IEM_STOS_CASE(ValBits, AddrBits) \
11828 IEM_MC_BEGIN(0, 2); \
11829 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
11830 IEM_MC_LOCAL(RTGCPTR, uAddr); \
11831 IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
11832 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
11833 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
11834 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
11835 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11836 } IEM_MC_ELSE() { \
11837 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11838 } IEM_MC_ENDIF(); \
11839 IEM_MC_ADVANCE_RIP(); \
11840 IEM_MC_END(); \
11841
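/* The above expands to one non-repeated STOS iteration: the low ValBits of
   rAX are stored at ES:rDI (no segment override applies) and rDI is stepped
   by EFLAGS.DF. No flags are modified. */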
11842/** Opcode 0xaa. */
11843FNIEMOP_DEF(iemOp_stosb_Yb_AL)
11844{
11845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11846
11847 /*
11848 * Use the C implementation if a repeat prefix is encountered.
11849 */
11850 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11851 {
11852 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
11853 switch (pVCpu->iem.s.enmEffAddrMode)
11854 {
11855 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
11856 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
11857 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
11858 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11859 }
11860 }
11861 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
11862
11863 /*
11864 * Sharing case implementation with stos[wdq] below.
11865 */
11866 switch (pVCpu->iem.s.enmEffAddrMode)
11867 {
11868 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
11869 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
11870 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
11871 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11872 }
11873 return VINF_SUCCESS;
11874}
11875
11876
11877/** Opcode 0xab. */
11878FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
11879{
11880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11881
11882 /*
11883 * Use the C implementation if a repeat prefix is encountered.
11884 */
11885 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11886 {
11887 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
11888 switch (pVCpu->iem.s.enmEffOpSize)
11889 {
11890 case IEMMODE_16BIT:
11891 switch (pVCpu->iem.s.enmEffAddrMode)
11892 {
11893 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
11894 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
11895 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
11896 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11897 }
11898 break;
11899 case IEMMODE_32BIT:
11900 switch (pVCpu->iem.s.enmEffAddrMode)
11901 {
11902 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
11903 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
11904 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
11905 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11906 }
11907 case IEMMODE_64BIT:
11908 switch (pVCpu->iem.s.enmEffAddrMode)
11909 {
11910 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
11911 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
11912 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
11913 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11914 }
11915 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11916 }
11917 }
11918 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
11919
11920 /*
11921 * Annoying double switch here.
11922 * Using ugly macro for implementing the cases, sharing it with stosb.
11923 */
11924 switch (pVCpu->iem.s.enmEffOpSize)
11925 {
11926 case IEMMODE_16BIT:
11927 switch (pVCpu->iem.s.enmEffAddrMode)
11928 {
11929 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
11930 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
11931 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
11932 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11933 }
11934 break;
11935
11936 case IEMMODE_32BIT:
11937 switch (pVCpu->iem.s.enmEffAddrMode)
11938 {
11939 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
11940 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
11941 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
11942 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11943 }
11944 break;
11945
11946 case IEMMODE_64BIT:
11947 switch (pVCpu->iem.s.enmEffAddrMode)
11948 {
11949 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11950 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
11951 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
11952 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11953 }
11954 break;
11955 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11956 }
11957 return VINF_SUCCESS;
11958}
11959
11960#undef IEM_STOS_CASE
11961
11962/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv */
11963#define IEM_LODS_CASE(ValBits, AddrBits) \
11964 IEM_MC_BEGIN(0, 2); \
11965 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
11966 IEM_MC_LOCAL(RTGCPTR, uAddr); \
11967 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
11968 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
11969 IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
11970 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
11971 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11972 } IEM_MC_ELSE() { \
11973 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11974 } IEM_MC_ENDIF(); \
11975 IEM_MC_ADVANCE_RIP(); \
11976 IEM_MC_END();
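/* The above expands to one non-repeated LODS iteration: the element at
   iEffSeg:rSI is loaded into rAX and rSI is stepped by EFLAGS.DF. No flags
   are modified. */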
11977
11978/** Opcode 0xac. */
11979FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
11980{
11981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11982
11983 /*
11984 * Use the C implementation if a repeat prefix is encountered.
11985 */
11986 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11987 {
11988 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
11989 switch (pVCpu->iem.s.enmEffAddrMode)
11990 {
11991 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
11992 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
11993 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
11994 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11995 }
11996 }
11997 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
11998
11999 /*
12000 * Sharing case implementation with lods[wdq] below.
12001 */
12002 switch (pVCpu->iem.s.enmEffAddrMode)
12003 {
12004 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
12005 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
12006 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
12007 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12008 }
12009 return VINF_SUCCESS;
12010}
12011
12012
12013/** Opcode 0xad. */
12014FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
12015{
12016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12017
12018 /*
12019 * Use the C implementation if a repeat prefix is encountered.
12020 */
12021 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12022 {
12023 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
12024 switch (pVCpu->iem.s.enmEffOpSize)
12025 {
12026 case IEMMODE_16BIT:
12027 switch (pVCpu->iem.s.enmEffAddrMode)
12028 {
12029 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
12030 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
12031 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
12032 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12033 }
12034 break;
12035 case IEMMODE_32BIT:
12036 switch (pVCpu->iem.s.enmEffAddrMode)
12037 {
12038 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
12039 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
12040 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
12041 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12042 }
12043 case IEMMODE_64BIT:
12044 switch (pVCpu->iem.s.enmEffAddrMode)
12045 {
12046 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
12047 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
12048 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
12049 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12050 }
12051 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12052 }
12053 }
12054 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
12055
12056 /*
12057 * Annoying double switch here.
12058 * Using ugly macro for implementing the cases, sharing it with lodsb.
12059 */
12060 switch (pVCpu->iem.s.enmEffOpSize)
12061 {
12062 case IEMMODE_16BIT:
12063 switch (pVCpu->iem.s.enmEffAddrMode)
12064 {
12065 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
12066 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
12067 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
12068 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12069 }
12070 break;
12071
12072 case IEMMODE_32BIT:
12073 switch (pVCpu->iem.s.enmEffAddrMode)
12074 {
12075 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
12076 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
12077 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
12078 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12079 }
12080 break;
12081
12082 case IEMMODE_64BIT:
12083 switch (pVCpu->iem.s.enmEffAddrMode)
12084 {
12085 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12086 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
12087 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
12088 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12089 }
12090 break;
12091 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12092 }
12093 return VINF_SUCCESS;
12094}
12095
12096#undef IEM_LODS_CASE
12097
12098/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv */
12099#define IEM_SCAS_CASE(ValBits, AddrBits) \
12100 IEM_MC_BEGIN(3, 2); \
12101 IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
12102 IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
12103 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
12104 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12105 \
12106 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12107 IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
12108 IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
12109 IEM_MC_REF_EFLAGS(pEFlags); \
12110 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
12111 \
12112 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12113 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12114 } IEM_MC_ELSE() { \
12115 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12116 } IEM_MC_ENDIF(); \
12117 IEM_MC_ADVANCE_RIP(); \
12118 IEM_MC_END();
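/* The above expands to one non-repeated SCAS iteration: rAX is compared
   with the element at ES:rDI (EFLAGS only) and rDI is stepped by EFLAGS.DF;
   rSI is not used by SCAS. */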
12119
12120/** Opcode 0xae. */
12121FNIEMOP_DEF(iemOp_scasb_AL_Xb)
12122{
12123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12124
12125 /*
12126 * Use the C implementation if a repeat prefix is encountered.
12127 */
12128 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12129 {
12130 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
12131 switch (pVCpu->iem.s.enmEffAddrMode)
12132 {
12133 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
12134 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
12135 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
12136 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12137 }
12138 }
12139 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12140 {
12141 IEMOP_MNEMONIC(repne_scasb_AL_Xb, "repne scasb AL,Xb");
12142 switch (pVCpu->iem.s.enmEffAddrMode)
12143 {
12144 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
12145 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
12146 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
12147 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12148 }
12149 }
12150 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
12151
12152 /*
12153 * Sharing case implementation with scas[wdq] below.
12154 */
12155 switch (pVCpu->iem.s.enmEffAddrMode)
12156 {
12157 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
12158 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
12159 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
12160 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12161 }
12162 return VINF_SUCCESS;
12163}
12164
12165
12166/** Opcode 0xaf. */
12167FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
12168{
12169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12170
12171 /*
12172 * Use the C implementation if a repeat prefix is encountered.
12173 */
12174 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12175 {
12176 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
12177 switch (pVCpu->iem.s.enmEffOpSize)
12178 {
12179 case IEMMODE_16BIT:
12180 switch (pVCpu->iem.s.enmEffAddrMode)
12181 {
12182 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
12183 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
12184 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
12185 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12186 }
12187 break;
12188 case IEMMODE_32BIT:
12189 switch (pVCpu->iem.s.enmEffAddrMode)
12190 {
12191 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
12192 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
12193 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
12194 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12195 }
12196 case IEMMODE_64BIT:
12197 switch (pVCpu->iem.s.enmEffAddrMode)
12198 {
12199                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* 16-bit addressing cannot be encoded in 64-bit mode; the 0x67 prefix selects 32-bit addressing instead. */
12200 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
12201 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
12202 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12203 }
12204 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12205 }
12206 }
12207 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12208 {
12209 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
12210 switch (pVCpu->iem.s.enmEffOpSize)
12211 {
12212 case IEMMODE_16BIT:
12213 switch (pVCpu->iem.s.enmEffAddrMode)
12214 {
12215 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
12216 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
12217 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
12218 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12219 }
12220 break;
12221 case IEMMODE_32BIT:
12222 switch (pVCpu->iem.s.enmEffAddrMode)
12223 {
12224 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
12225 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
12226 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
12227 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12228 }
12229 case IEMMODE_64BIT:
12230 switch (pVCpu->iem.s.enmEffAddrMode)
12231 {
12232 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
12233 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
12234 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
12235 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12236 }
12237 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12238 }
12239 }
12240 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
12241
12242 /*
12243 * Annoying double switch here.
12244 * Using ugly macro for implementing the cases, sharing it with scasb.
12245 */
12246 switch (pVCpu->iem.s.enmEffOpSize)
12247 {
12248 case IEMMODE_16BIT:
12249 switch (pVCpu->iem.s.enmEffAddrMode)
12250 {
12251 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
12252 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
12253 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
12254 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12255 }
12256 break;
12257
12258 case IEMMODE_32BIT:
12259 switch (pVCpu->iem.s.enmEffAddrMode)
12260 {
12261 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
12262 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
12263 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
12264 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12265 }
12266 break;
12267
12268 case IEMMODE_64BIT:
12269 switch (pVCpu->iem.s.enmEffAddrMode)
12270 {
12271 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12272 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
12273 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
12274 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12275 }
12276 break;
12277 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12278 }
12279 return VINF_SUCCESS;
12280}
12281
12282#undef IEM_SCAS_CASE
12283
12284/**
12285 * Common 'mov r8, imm8' helper.
12286 */
12287FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
12288{
12289 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12291
12292 IEM_MC_BEGIN(0, 1);
12293 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
12294 IEM_MC_STORE_GREG_U8(iReg, u8Value);
12295 IEM_MC_ADVANCE_RIP();
12296 IEM_MC_END();
12297
12298 return VINF_SUCCESS;
12299}
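/* Example encodings served by this helper: B3 2A = mov bl,0x2A, while with a
   REX.B prefix 41 B3 2A = mov r11b,0x2A -- hence the uRexB OR-ed into the
   register index at each call site below. */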
12300
12301
12302/** Opcode 0xb0. */
12303FNIEMOP_DEF(iemOp_mov_AL_Ib)
12304{
12305 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
12306 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12307}
12308
12309
12310/** Opcode 0xb1. */
12311FNIEMOP_DEF(iemOp_CL_Ib)
12312{
12313 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
12314 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12315}
12316
12317
12318/** Opcode 0xb2. */
12319FNIEMOP_DEF(iemOp_DL_Ib)
12320{
12321 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
12322 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12323}
12324
12325
12326/** Opcode 0xb3. */
12327FNIEMOP_DEF(iemOp_BL_Ib)
12328{
12329 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
12330 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12331}
12332
12333
12334/** Opcode 0xb4. */
12335FNIEMOP_DEF(iemOp_mov_AH_Ib)
12336{
12337 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
12338 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12339}
12340
12341
12342/** Opcode 0xb5. */
12343FNIEMOP_DEF(iemOp_CH_Ib)
12344{
12345 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
12346 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12347}
12348
12349
12350/** Opcode 0xb6. */
12351FNIEMOP_DEF(iemOp_DH_Ib)
12352{
12353 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
12354 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12355}
12356
12357
12358/** Opcode 0xb7. */
12359FNIEMOP_DEF(iemOp_BH_Ib)
12360{
12361 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
12362 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12363}
12364
12365
12366/**
12367 * Common 'mov regX,immX' helper.
12368 */
12369FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
12370{
12371 switch (pVCpu->iem.s.enmEffOpSize)
12372 {
12373 case IEMMODE_16BIT:
12374 {
12375 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12377
12378 IEM_MC_BEGIN(0, 1);
12379 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
12380 IEM_MC_STORE_GREG_U16(iReg, u16Value);
12381 IEM_MC_ADVANCE_RIP();
12382 IEM_MC_END();
12383 break;
12384 }
12385
12386 case IEMMODE_32BIT:
12387 {
12388 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12390
12391 IEM_MC_BEGIN(0, 1);
12392 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
12393 IEM_MC_STORE_GREG_U32(iReg, u32Value);
12394 IEM_MC_ADVANCE_RIP();
12395 IEM_MC_END();
12396 break;
12397 }
12398 case IEMMODE_64BIT:
12399 {
12400 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
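                /* B8+r with REX.W is the only x86 encoding carrying a full 8-byte
                   immediate, e.g. 48 B8 EF CD AB 89 67 45 23 01 decodes as
                   mov rax,0x0123456789abcdef. */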
12401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12402
12403 IEM_MC_BEGIN(0, 1);
12404 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
12405 IEM_MC_STORE_GREG_U64(iReg, u64Value);
12406 IEM_MC_ADVANCE_RIP();
12407 IEM_MC_END();
12408 break;
12409 }
12410 }
12411
12412 return VINF_SUCCESS;
12413}
12414
12415
12416/** Opcode 0xb8. */
12417FNIEMOP_DEF(iemOp_eAX_Iv)
12418{
12419    IEMOP_MNEMONIC(mov_rAX_Iv, "mov rAX,Iv");
12420 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12421}
12422
12423
12424/** Opcode 0xb9. */
12425FNIEMOP_DEF(iemOp_eCX_Iv)
12426{
12427    IEMOP_MNEMONIC(mov_rCX_Iv, "mov rCX,Iv");
12428 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12429}
12430
12431
12432/** Opcode 0xba. */
12433FNIEMOP_DEF(iemOp_eDX_Iv)
12434{
12435    IEMOP_MNEMONIC(mov_rDX_Iv, "mov rDX,Iv");
12436 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12437}
12438
12439
12440/** Opcode 0xbb. */
12441FNIEMOP_DEF(iemOp_eBX_Iv)
12442{
12443    IEMOP_MNEMONIC(mov_rBX_Iv, "mov rBX,Iv");
12444 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12445}
12446
12447
12448/** Opcode 0xbc. */
12449FNIEMOP_DEF(iemOp_eSP_Iv)
12450{
12451    IEMOP_MNEMONIC(mov_rSP_Iv, "mov rSP,Iv");
12452 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12453}
12454
12455
12456/** Opcode 0xbd. */
12457FNIEMOP_DEF(iemOp_eBP_Iv)
12458{
12459    IEMOP_MNEMONIC(mov_rBP_Iv, "mov rBP,Iv");
12460 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12461}
12462
12463
12464/** Opcode 0xbe. */
12465FNIEMOP_DEF(iemOp_eSI_Iv)
12466{
12467    IEMOP_MNEMONIC(mov_rSI_Iv, "mov rSI,Iv");
12468 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12469}
12470
12471
12472/** Opcode 0xbf. */
12473FNIEMOP_DEF(iemOp_eDI_Iv)
12474{
12475    IEMOP_MNEMONIC(mov_rDI_Iv, "mov rDI,Iv");
12476 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12477}
12478
12479
12480/** Opcode 0xc0. */
12481FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
12482{
12483 IEMOP_HLP_MIN_186();
12484 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12485 PCIEMOPSHIFTSIZES pImpl;
12486 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
12487 {
12488 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
12489 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
12490 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
12491 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
12492 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
12493 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
12494 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
12495 case 6: return IEMOP_RAISE_INVALID_OPCODE();
12496 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
12497 }
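    /* The ModRM reg field selects the group 2 operation; e.g. the byte sequence
       C0 E0 04 has mod=3, reg=4, rm=0 and thus decodes as shl AL,4. */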
12498 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
12499
12500 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12501 {
12502 /* register */
12503 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12505 IEM_MC_BEGIN(3, 0);
12506 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
12507 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
12508 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12509 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
12510 IEM_MC_REF_EFLAGS(pEFlags);
12511 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
12512 IEM_MC_ADVANCE_RIP();
12513 IEM_MC_END();
12514 }
12515 else
12516 {
12517 /* memory */
12518 IEM_MC_BEGIN(3, 2);
12519 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
12520 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12521 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12522 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12523
12524 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
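        /* The trailing 1 is the number of immediate bytes still to be fetched;
           the effective-address calculation needs it to find the end of the
           instruction for RIP-relative operands. */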
12525 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12526 IEM_MC_ASSIGN(cShiftArg, cShift);
12527 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12528 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12529 IEM_MC_FETCH_EFLAGS(EFlags);
12530 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
12531
12532 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
12533 IEM_MC_COMMIT_EFLAGS(EFlags);
12534 IEM_MC_ADVANCE_RIP();
12535 IEM_MC_END();
12536 }
12537 return VINF_SUCCESS;
12538}
12539
12540
12541/** Opcode 0xc1. */
12542FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
12543{
12544 IEMOP_HLP_MIN_186();
12545 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12546 PCIEMOPSHIFTSIZES pImpl;
12547 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
12548 {
12549 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
12550 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
12551 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
12552 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
12553 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
12554 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
12555 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
12556 case 6: return IEMOP_RAISE_INVALID_OPCODE();
12557 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
12558 }
12559 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
12560
12561 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12562 {
12563 /* register */
12564 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12566 switch (pVCpu->iem.s.enmEffOpSize)
12567 {
12568 case IEMMODE_16BIT:
12569 IEM_MC_BEGIN(3, 0);
12570 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12571 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
12572 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12573 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
12574 IEM_MC_REF_EFLAGS(pEFlags);
12575 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
12576 IEM_MC_ADVANCE_RIP();
12577 IEM_MC_END();
12578 return VINF_SUCCESS;
12579
12580 case IEMMODE_32BIT:
12581 IEM_MC_BEGIN(3, 0);
12582 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12583 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
12584 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12585 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
12586 IEM_MC_REF_EFLAGS(pEFlags);
12587 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
12588 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12589 IEM_MC_ADVANCE_RIP();
12590 IEM_MC_END();
12591 return VINF_SUCCESS;
12592
12593 case IEMMODE_64BIT:
12594 IEM_MC_BEGIN(3, 0);
12595 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12596 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
12597 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12598 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
12599 IEM_MC_REF_EFLAGS(pEFlags);
12600 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
12601 IEM_MC_ADVANCE_RIP();
12602 IEM_MC_END();
12603 return VINF_SUCCESS;
12604
12605 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12606 }
12607 }
12608 else
12609 {
12610 /* memory */
12611 switch (pVCpu->iem.s.enmEffOpSize)
12612 {
12613 case IEMMODE_16BIT:
12614 IEM_MC_BEGIN(3, 2);
12615 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12616 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12617 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12619
12620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12621 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12622 IEM_MC_ASSIGN(cShiftArg, cShift);
12623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12624 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12625 IEM_MC_FETCH_EFLAGS(EFlags);
12626 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
12627
12628 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
12629 IEM_MC_COMMIT_EFLAGS(EFlags);
12630 IEM_MC_ADVANCE_RIP();
12631 IEM_MC_END();
12632 return VINF_SUCCESS;
12633
12634 case IEMMODE_32BIT:
12635 IEM_MC_BEGIN(3, 2);
12636 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12637 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12638 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12639 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12640
12641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12642 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12643 IEM_MC_ASSIGN(cShiftArg, cShift);
12644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12645 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12646 IEM_MC_FETCH_EFLAGS(EFlags);
12647 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
12648
12649 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
12650 IEM_MC_COMMIT_EFLAGS(EFlags);
12651 IEM_MC_ADVANCE_RIP();
12652 IEM_MC_END();
12653 return VINF_SUCCESS;
12654
12655 case IEMMODE_64BIT:
12656 IEM_MC_BEGIN(3, 2);
12657 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12658 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12659 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12660 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12661
12662 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12663 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12664 IEM_MC_ASSIGN(cShiftArg, cShift);
12665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12666 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12667 IEM_MC_FETCH_EFLAGS(EFlags);
12668 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
12669
12670 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
12671 IEM_MC_COMMIT_EFLAGS(EFlags);
12672 IEM_MC_ADVANCE_RIP();
12673 IEM_MC_END();
12674 return VINF_SUCCESS;
12675
12676 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12677 }
12678 }
12679}
12680
12681
12682/** Opcode 0xc2. */
12683FNIEMOP_DEF(iemOp_retn_Iw)
12684{
12685 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
12686 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12688 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12689 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
12690}
12691
12692
12693/** Opcode 0xc3. */
12694FNIEMOP_DEF(iemOp_retn)
12695{
12696 IEMOP_MNEMONIC(retn, "retn");
12697 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12699 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
12700}
12701
12702
12703/** Opcode 0xc4. */
12704FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
12705{
12706 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12707 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
12708 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12709 {
12710        IEMOP_MNEMONIC(vex3_prefix, "3-byte-vex");
12711        /* The LES instruction is invalid in 64-bit mode. In legacy and
12712           compatibility mode it is invalid with MOD=3.
12713           The use as a VEX prefix (the 3-byte form) is made possible by
12714           assigning the inverted REX.R and REX.X to the two MOD bits, since
12715           the REX bits are ignored outside of 64-bit mode. */
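        /* Illustrative layout of the 3-byte VEX prefix sharing this opcode (not
           decoded at this revision):
               C4  R.X.B.m-mmmm  W.vvvv.L.pp  opcode
           R/X/B are inverted REX bits; outside 64-bit mode the top two bits
           (inverted R and X) must be 1, which is exactly the mod=3 ModRM
           pattern tested for above. */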
12717 /** @todo VEX: Just use new tables for it. */
12718 return IEMOP_RAISE_INVALID_OPCODE();
12719 }
12720 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
12721 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
12722}
12723
12724
12725/** Opcode 0xc5. */
12726FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
12727{
12728    /* The LDS instruction is invalid in 64-bit mode. In legacy and
12729       compatibility mode it is invalid with MOD=3.
12730       The use as a VEX prefix (the 2-byte form) is made possible by assigning
12731       the inverted REX.R to the top MOD bit, and the top bit of the inverted
12732       register specifier to the bottom MOD bit. VEX is unavailable in real or v86 mode. */
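    /* Illustrative layout of the 2-byte VEX prefix sharing this opcode:
           C5  R.vvvv.L.pp  opcode
       e.g. C5 F8 77 = vzeroupper (inverted R=1, vvvv=1111, L=0, pp=00);
       outside 64-bit mode the top two bits must be 1, limiting vvvv to
       registers 0..7 there. */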
12733 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12734 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
12735 {
12736 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
12737 {
12738 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
12739 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
12740 }
12741 IEMOP_HLP_NO_REAL_OR_V86_MODE();
12742 }
12743
12744    IEMOP_MNEMONIC(vex2_prefix, "2-byte-vex");
12745    /** @todo Test when exactly the VEX conformance checks kick in during
12746 * instruction decoding and fetching (using \#PF). */
12747    /* For the 2-byte form, bRm above already holds the VEX payload byte;
12748       only the opcode byte follows. */
12749    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
12750#if 0 /* will make sense of this next week... */
12751    if (   !(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
12752 &&
12753 )
12754 {
12755
12756 }
12757#endif
12758
12759 /** @todo VEX: Just use new tables for it. */
12760 return IEMOP_RAISE_INVALID_OPCODE();
12761}
12762
12763
12764/** Opcode 0xc6. */
12765FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
12766{
12767 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12768 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
12769 return IEMOP_RAISE_INVALID_OPCODE();
12770 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
12771
12772 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12773 {
12774 /* register access */
12775 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12777 IEM_MC_BEGIN(0, 0);
12778 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
12779 IEM_MC_ADVANCE_RIP();
12780 IEM_MC_END();
12781 }
12782 else
12783 {
12784 /* memory access. */
12785 IEM_MC_BEGIN(0, 1);
12786 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12788 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12789 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12790 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
12791 IEM_MC_ADVANCE_RIP();
12792 IEM_MC_END();
12793 }
12794 return VINF_SUCCESS;
12795}
12796
12797
12798/** Opcode 0xc7. */
12799FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
12800{
12801 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12802    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
12803 return IEMOP_RAISE_INVALID_OPCODE();
12804 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
12805
12806 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12807 {
12808 /* register access */
12809 switch (pVCpu->iem.s.enmEffOpSize)
12810 {
12811 case IEMMODE_16BIT:
12812 IEM_MC_BEGIN(0, 0);
12813 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12815 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
12816 IEM_MC_ADVANCE_RIP();
12817 IEM_MC_END();
12818 return VINF_SUCCESS;
12819
12820 case IEMMODE_32BIT:
12821 IEM_MC_BEGIN(0, 0);
12822 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12823 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12824 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
12825 IEM_MC_ADVANCE_RIP();
12826 IEM_MC_END();
12827 return VINF_SUCCESS;
12828
12829 case IEMMODE_64BIT:
12830 IEM_MC_BEGIN(0, 0);
12831 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
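                /* Iz is at most 32 bits wide: in 64-bit mode the dword immediate
                   is sign-extended, e.g. 48 C7 C0 FF FF FF FF decodes as
                   mov rax,0xffffffffffffffff. */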
12832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12833 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
12834 IEM_MC_ADVANCE_RIP();
12835 IEM_MC_END();
12836 return VINF_SUCCESS;
12837
12838 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12839 }
12840 }
12841 else
12842 {
12843 /* memory access. */
12844 switch (pVCpu->iem.s.enmEffOpSize)
12845 {
12846 case IEMMODE_16BIT:
12847 IEM_MC_BEGIN(0, 1);
12848 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12849 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
12850 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12852 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
12853 IEM_MC_ADVANCE_RIP();
12854 IEM_MC_END();
12855 return VINF_SUCCESS;
12856
12857 case IEMMODE_32BIT:
12858 IEM_MC_BEGIN(0, 1);
12859 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12860 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
12861 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12863 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
12864 IEM_MC_ADVANCE_RIP();
12865 IEM_MC_END();
12866 return VINF_SUCCESS;
12867
12868 case IEMMODE_64BIT:
12869 IEM_MC_BEGIN(0, 1);
12870 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12871 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
12872 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
12873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12874 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
12875 IEM_MC_ADVANCE_RIP();
12876 IEM_MC_END();
12877 return VINF_SUCCESS;
12878
12879 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12880 }
12881 }
12882}
12883
12884
12885
12886
12887/** Opcode 0xc8. */
12888FNIEMOP_DEF(iemOp_enter_Iw_Ib)
12889{
12890 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
12891 IEMOP_HLP_MIN_186();
12892 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12893 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
12894 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
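    /* e.g. C8 20 00 01 = enter 0x20,1: builds a one-level-nested stack frame
       with 0x20 bytes of locals; the deferred C worker implements the exact
       push/copy semantics. */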
12895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12896 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
12897}
12898
12899
12900/** Opcode 0xc9. */
12901FNIEMOP_DEF(iemOp_leave)
12902{
12903 IEMOP_MNEMONIC(leave, "leave");
12904 IEMOP_HLP_MIN_186();
12905 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12906 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12907 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
12908}
12909
12910
12911/** Opcode 0xca. */
12912FNIEMOP_DEF(iemOp_retf_Iw)
12913{
12914 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
12915 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12917 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12918 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
12919}
12920
12921
12922/** Opcode 0xcb. */
12923FNIEMOP_DEF(iemOp_retf)
12924{
12925 IEMOP_MNEMONIC(retf, "retf");
12926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12927 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12928 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
12929}
12930
12931
12932/** Opcode 0xcc. */
12933FNIEMOP_DEF(iemOp_int_3)
12934{
12935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12936 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
12937}
12938
12939
12940/** Opcode 0xcd. */
12941FNIEMOP_DEF(iemOp_int_Ib)
12942{
12943 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
12944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12945 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
12946}
12947
12948
12949/** Opcode 0xce. */
12950FNIEMOP_DEF(iemOp_into)
12951{
12952 IEMOP_MNEMONIC(into, "into");
12953 IEMOP_HLP_NO_64BIT();
12954
12955 IEM_MC_BEGIN(2, 0);
12956 IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
12957 IEM_MC_ARG_CONST(bool, fIsBpInstr, /*=*/ false, 1);
12958 IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
12959 IEM_MC_END();
12960 return VINF_SUCCESS;
12961}
12962
12963
12964/** Opcode 0xcf. */
12965FNIEMOP_DEF(iemOp_iret)
12966{
12967 IEMOP_MNEMONIC(iret, "iret");
12968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12969 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
12970}
12971
12972
12973/** Opcode 0xd0. */
12974FNIEMOP_DEF(iemOp_Grp2_Eb_1)
12975{
12976 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12977 PCIEMOPSHIFTSIZES pImpl;
12978 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
12979 {
12980 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
12981 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
12982 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
12983 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
12984 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
12985 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
12986 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
12987 case 6: return IEMOP_RAISE_INVALID_OPCODE();
12988 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
12989 }
12990 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
12991
12992 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12993 {
12994 /* register */
12995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12996 IEM_MC_BEGIN(3, 0);
12997 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
12998 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
12999 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13000 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13001 IEM_MC_REF_EFLAGS(pEFlags);
13002 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13003 IEM_MC_ADVANCE_RIP();
13004 IEM_MC_END();
13005 }
13006 else
13007 {
13008 /* memory */
13009 IEM_MC_BEGIN(3, 2);
13010 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13011 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
13012 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13014
13015 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13017 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13018 IEM_MC_FETCH_EFLAGS(EFlags);
13019 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13020
13021 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
13022 IEM_MC_COMMIT_EFLAGS(EFlags);
13023 IEM_MC_ADVANCE_RIP();
13024 IEM_MC_END();
13025 }
13026 return VINF_SUCCESS;
13027}
13028
13029
13030
13031/** Opcode 0xd1. */
13032FNIEMOP_DEF(iemOp_Grp2_Ev_1)
13033{
13034 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13035 PCIEMOPSHIFTSIZES pImpl;
13036 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13037 {
13038 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
13039 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
13040 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
13041 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
13042 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
13043 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
13044 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
13045 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13046 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
13047 }
13048 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13049
13050 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13051 {
13052 /* register */
13053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13054 switch (pVCpu->iem.s.enmEffOpSize)
13055 {
13056 case IEMMODE_16BIT:
13057 IEM_MC_BEGIN(3, 0);
13058 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13059 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13060 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13061 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13062 IEM_MC_REF_EFLAGS(pEFlags);
13063 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13064 IEM_MC_ADVANCE_RIP();
13065 IEM_MC_END();
13066 return VINF_SUCCESS;
13067
13068 case IEMMODE_32BIT:
13069 IEM_MC_BEGIN(3, 0);
13070 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13071 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13072 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13073 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13074 IEM_MC_REF_EFLAGS(pEFlags);
13075 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13076 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
13077 IEM_MC_ADVANCE_RIP();
13078 IEM_MC_END();
13079 return VINF_SUCCESS;
13080
13081 case IEMMODE_64BIT:
13082 IEM_MC_BEGIN(3, 0);
13083 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13084 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13085 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13086 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13087 IEM_MC_REF_EFLAGS(pEFlags);
13088 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13089 IEM_MC_ADVANCE_RIP();
13090 IEM_MC_END();
13091 return VINF_SUCCESS;
13092
13093 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13094 }
13095 }
13096 else
13097 {
13098 /* memory */
13099 switch (pVCpu->iem.s.enmEffOpSize)
13100 {
13101 case IEMMODE_16BIT:
13102 IEM_MC_BEGIN(3, 2);
13103 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13104 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13105 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13106 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13107
13108 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13110 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13111 IEM_MC_FETCH_EFLAGS(EFlags);
13112 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13113
13114 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
13115 IEM_MC_COMMIT_EFLAGS(EFlags);
13116 IEM_MC_ADVANCE_RIP();
13117 IEM_MC_END();
13118 return VINF_SUCCESS;
13119
13120 case IEMMODE_32BIT:
13121 IEM_MC_BEGIN(3, 2);
13122 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13123 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13124 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13125 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13126
13127 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13129 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13130 IEM_MC_FETCH_EFLAGS(EFlags);
13131 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13132
13133 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
13134 IEM_MC_COMMIT_EFLAGS(EFlags);
13135 IEM_MC_ADVANCE_RIP();
13136 IEM_MC_END();
13137 return VINF_SUCCESS;
13138
13139 case IEMMODE_64BIT:
13140 IEM_MC_BEGIN(3, 2);
13141 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13142 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13143 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13144 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13145
13146 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13148 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13149 IEM_MC_FETCH_EFLAGS(EFlags);
13150 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13151
13152 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
13153 IEM_MC_COMMIT_EFLAGS(EFlags);
13154 IEM_MC_ADVANCE_RIP();
13155 IEM_MC_END();
13156 return VINF_SUCCESS;
13157
13158 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13159 }
13160 }
13161}
13162
13163
13164/** Opcode 0xd2. */
13165FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
13166{
13167 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13168 PCIEMOPSHIFTSIZES pImpl;
13169 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13170 {
13171 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
13172 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
13173 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
13174 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
13175 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
13176 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
13177 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
13178 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13179 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
13180 }
13181 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13182
13183 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13184 {
13185 /* register */
13186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13187 IEM_MC_BEGIN(3, 0);
13188 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13189 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13190 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13191 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13192 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13193 IEM_MC_REF_EFLAGS(pEFlags);
13194 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13195 IEM_MC_ADVANCE_RIP();
13196 IEM_MC_END();
13197 }
13198 else
13199 {
13200 /* memory */
13201 IEM_MC_BEGIN(3, 2);
13202 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13203 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13204 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13205 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13206
13207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13209 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13210 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13211 IEM_MC_FETCH_EFLAGS(EFlags);
13212 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13213
13214 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
13215 IEM_MC_COMMIT_EFLAGS(EFlags);
13216 IEM_MC_ADVANCE_RIP();
13217 IEM_MC_END();
13218 }
13219 return VINF_SUCCESS;
13220}
13221
13222
13223/** Opcode 0xd3. */
13224FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
13225{
13226 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13227 PCIEMOPSHIFTSIZES pImpl;
13228 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13229 {
13230 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
13231 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
13232 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
13233 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
13234 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
13235 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
13236 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
13237 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13238 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
13239 }
13240 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13241
13242 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13243 {
13244 /* register */
13245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13246 switch (pVCpu->iem.s.enmEffOpSize)
13247 {
13248 case IEMMODE_16BIT:
13249 IEM_MC_BEGIN(3, 0);
13250 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13251 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13252 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13253 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13254 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13255 IEM_MC_REF_EFLAGS(pEFlags);
13256 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13257 IEM_MC_ADVANCE_RIP();
13258 IEM_MC_END();
13259 return VINF_SUCCESS;
13260
13261 case IEMMODE_32BIT:
13262 IEM_MC_BEGIN(3, 0);
13263 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13264 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13265 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13266 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13267 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13268 IEM_MC_REF_EFLAGS(pEFlags);
13269 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13270 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
13271 IEM_MC_ADVANCE_RIP();
13272 IEM_MC_END();
13273 return VINF_SUCCESS;
13274
13275 case IEMMODE_64BIT:
13276 IEM_MC_BEGIN(3, 0);
13277 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13278 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13279 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13280 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13281 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13282 IEM_MC_REF_EFLAGS(pEFlags);
13283 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13284 IEM_MC_ADVANCE_RIP();
13285 IEM_MC_END();
13286 return VINF_SUCCESS;
13287
13288 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13289 }
13290 }
13291 else
13292 {
13293 /* memory */
13294 switch (pVCpu->iem.s.enmEffOpSize)
13295 {
13296 case IEMMODE_16BIT:
13297 IEM_MC_BEGIN(3, 2);
13298 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13299 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13300 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13301 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13302
13303 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13305 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13306 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13307 IEM_MC_FETCH_EFLAGS(EFlags);
13308 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13309
13310 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
13311 IEM_MC_COMMIT_EFLAGS(EFlags);
13312 IEM_MC_ADVANCE_RIP();
13313 IEM_MC_END();
13314 return VINF_SUCCESS;
13315
13316 case IEMMODE_32BIT:
13317 IEM_MC_BEGIN(3, 2);
13318 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13319 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13320 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13321 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13322
13323 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13325 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13326 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13327 IEM_MC_FETCH_EFLAGS(EFlags);
13328 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13329
13330 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
13331 IEM_MC_COMMIT_EFLAGS(EFlags);
13332 IEM_MC_ADVANCE_RIP();
13333 IEM_MC_END();
13334 return VINF_SUCCESS;
13335
13336 case IEMMODE_64BIT:
13337 IEM_MC_BEGIN(3, 2);
13338 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13339 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13340 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13341 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13342
13343 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13345 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13346 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13347 IEM_MC_FETCH_EFLAGS(EFlags);
13348 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13349
13350 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
13351 IEM_MC_COMMIT_EFLAGS(EFlags);
13352 IEM_MC_ADVANCE_RIP();
13353 IEM_MC_END();
13354 return VINF_SUCCESS;
13355
13356 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13357 }
13358 }
13359}
13360
13361/** Opcode 0xd4. */
13362FNIEMOP_DEF(iemOp_aam_Ib)
13363{
13364 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
13365 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
13366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13367 IEMOP_HLP_NO_64BIT();
13368 if (!bImm)
13369 return IEMOP_RAISE_DIVIDE_ERROR();
13370 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
13371}
13372
13373
13374/** Opcode 0xd5. */
13375FNIEMOP_DEF(iemOp_aad_Ib)
13376{
13377 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
13378 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
13379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13380 IEMOP_HLP_NO_64BIT();
13381 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
13382}
13383
13384
13385/** Opcode 0xd6. */
13386FNIEMOP_DEF(iemOp_salc)
13387{
13388 IEMOP_MNEMONIC(salc, "salc");
13389    IEMOP_HLP_MIN_286(); /* (undocumented at the time) */
13390    /* SALC is a single-byte opcode: no ModRM byte and no immediate to fetch. */
13391 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13392 IEMOP_HLP_NO_64BIT();
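    /* Behaves like a flags-preserving sbb al,al: AL = CF ? 0xff : 0x00. */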
13393
13394 IEM_MC_BEGIN(0, 0);
13395 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
13396 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
13397 } IEM_MC_ELSE() {
13398 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
13399 } IEM_MC_ENDIF();
13400 IEM_MC_ADVANCE_RIP();
13401 IEM_MC_END();
13402 return VINF_SUCCESS;
13403}
13404
13405
13406/** Opcode 0xd7. */
13407FNIEMOP_DEF(iemOp_xlat)
13408{
13409 IEMOP_MNEMONIC(xlat, "xlat");
13410 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
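    /* Effectively AL = *(seg:rBX + zero-extended AL), where seg defaults to DS
       but honours any segment override (iEffSeg). */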
13411 switch (pVCpu->iem.s.enmEffAddrMode)
13412 {
13413 case IEMMODE_16BIT:
13414 IEM_MC_BEGIN(2, 0);
13415 IEM_MC_LOCAL(uint8_t, u8Tmp);
13416 IEM_MC_LOCAL(uint16_t, u16Addr);
13417 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
13418 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
13419 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
13420 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
13421 IEM_MC_ADVANCE_RIP();
13422 IEM_MC_END();
13423 return VINF_SUCCESS;
13424
13425 case IEMMODE_32BIT:
13426 IEM_MC_BEGIN(2, 0);
13427 IEM_MC_LOCAL(uint8_t, u8Tmp);
13428 IEM_MC_LOCAL(uint32_t, u32Addr);
13429 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
13430 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
13431 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
13432 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
13433 IEM_MC_ADVANCE_RIP();
13434 IEM_MC_END();
13435 return VINF_SUCCESS;
13436
13437 case IEMMODE_64BIT:
13438 IEM_MC_BEGIN(2, 0);
13439 IEM_MC_LOCAL(uint8_t, u8Tmp);
13440 IEM_MC_LOCAL(uint64_t, u64Addr);
13441 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
13442 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
13443 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
13444 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
13445 IEM_MC_ADVANCE_RIP();
13446 IEM_MC_END();
13447 return VINF_SUCCESS;
13448
13449 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13450 }
13451}
13452
13453
13454/**
13455 * Common worker for FPU instructions working on ST0 and STn, and storing the
13456 * result in ST0.
13457 *
13458 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13459 */
13460FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
13461{
13462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13463
13464 IEM_MC_BEGIN(3, 1);
13465 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13466 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13467 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13468 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13469
13470 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13471 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13472 IEM_MC_PREPARE_FPU_USAGE();
13473 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13474 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
13475 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13476 IEM_MC_ELSE()
13477 IEM_MC_FPU_STACK_UNDERFLOW(0);
13478 IEM_MC_ENDIF();
13479 IEM_MC_ADVANCE_RIP();
13480
13481 IEM_MC_END();
13482 return VINF_SUCCESS;
13483}
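/* For instance, D8 C3 (mod=3, reg=0, rm=3) reaches this worker via
   iemOp_fadd_stN below and computes fadd st0,st3, storing into ST0. */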
13484
13485
13486/**
13487 * Common worker for FPU instructions working on ST0 and STn, and only affecting
13488 * flags.
13489 *
13490 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13491 */
13492FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
13493{
13494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13495
13496 IEM_MC_BEGIN(3, 1);
13497 IEM_MC_LOCAL(uint16_t, u16Fsw);
13498 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13499 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13500 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13501
13502 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13503 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13504 IEM_MC_PREPARE_FPU_USAGE();
13505 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13506 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
13507 IEM_MC_UPDATE_FSW(u16Fsw);
13508 IEM_MC_ELSE()
13509 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
13510 IEM_MC_ENDIF();
13511 IEM_MC_ADVANCE_RIP();
13512
13513 IEM_MC_END();
13514 return VINF_SUCCESS;
13515}
13516
13517
13518/**
13519 * Common worker for FPU instructions working on ST0 and STn, only affecting
13520 * flags, and popping when done.
13521 *
13522 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13523 */
13524FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
13525{
13526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13527
13528 IEM_MC_BEGIN(3, 1);
13529 IEM_MC_LOCAL(uint16_t, u16Fsw);
13530 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13531 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13532 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13533
13534 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13535 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13536 IEM_MC_PREPARE_FPU_USAGE();
13537 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13538 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
13539 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
13540 IEM_MC_ELSE()
13541 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
13542 IEM_MC_ENDIF();
13543 IEM_MC_ADVANCE_RIP();
13544
13545 IEM_MC_END();
13546 return VINF_SUCCESS;
13547}
13548
13549
13550/** Opcode 0xd8 11/0. */
13551FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
13552{
13553 IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
13554 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
13555}
13556
13557
13558/** Opcode 0xd8 11/1. */
13559FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
13560{
13561 IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
13562 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
13563}
13564
13565
13566/** Opcode 0xd8 11/2. */
13567FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
13568{
13569 IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
13570 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
13571}
13572
13573
13574/** Opcode 0xd8 11/3. */
13575FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
13576{
13577 IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
13578 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
13579}
13580
13581
13582/** Opcode 0xd8 11/4. */
13583FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
13584{
13585 IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
13586 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
13587}
13588
13589
13590/** Opcode 0xd8 11/5. */
13591FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
13592{
13593 IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
13594 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
13595}
13596
13597
13598/** Opcode 0xd8 11/6. */
13599FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
13600{
13601 IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
13602 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
13603}
13604
13605
13606/** Opcode 0xd8 11/7. */
13607FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
13608{
13609 IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
13610 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
13611}
13612
13613
13614/**
13615 * Common worker for FPU instructions working on ST0 and an m32r, and storing
13616 * the result in ST0.
13617 *
13618 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13619 */
13620FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
13621{
13622 IEM_MC_BEGIN(3, 3);
13623 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13624 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13625 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
13626 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13627 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13628 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
13629
13630 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13632
13633 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13634 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13635 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13636
13637 IEM_MC_PREPARE_FPU_USAGE();
13638 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
13639 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
13640 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13641 IEM_MC_ELSE()
13642 IEM_MC_FPU_STACK_UNDERFLOW(0);
13643 IEM_MC_ENDIF();
13644 IEM_MC_ADVANCE_RIP();
13645
13646 IEM_MC_END();
13647 return VINF_SUCCESS;
13648}
13649
13650
13651/** Opcode 0xd8 !11/0. */
13652FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
13653{
13654 IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
13655 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
13656}
13657
13658
13659/** Opcode 0xd8 !11/1. */
13660FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
13661{
13662 IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
13663 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
13664}
13665
13666
13667/** Opcode 0xd8 !11/2. */
13668FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
13669{
13670 IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");
13671
13672 IEM_MC_BEGIN(3, 3);
13673 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13674 IEM_MC_LOCAL(uint16_t, u16Fsw);
13675 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
13676 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13677 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13678 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
13679
13680 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13682
13683 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13684 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13685 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13686
13687 IEM_MC_PREPARE_FPU_USAGE();
13688 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
13689 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
13690 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13691 IEM_MC_ELSE()
13692 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13693 IEM_MC_ENDIF();
13694 IEM_MC_ADVANCE_RIP();
13695
13696 IEM_MC_END();
13697 return VINF_SUCCESS;
13698}
13699
13700
13701/** Opcode 0xd8 !11/3. */
13702FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
13703{
13704 IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");
13705
13706 IEM_MC_BEGIN(3, 3);
13707 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13708 IEM_MC_LOCAL(uint16_t, u16Fsw);
13709 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
13710 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13711 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13712 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
13713
13714 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13716
13717 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13718 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13719 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13720
13721 IEM_MC_PREPARE_FPU_USAGE();
13722 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
13723 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
13724 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13725 IEM_MC_ELSE()
13726 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13727 IEM_MC_ENDIF();
13728 IEM_MC_ADVANCE_RIP();
13729
13730 IEM_MC_END();
13731 return VINF_SUCCESS;
13732}
13733
13734
13735/** Opcode 0xd8 !11/4. */
13736FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
13737{
13738 IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
13739 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
13740}
13741
13742
13743/** Opcode 0xd8 !11/5. */
13744FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
13745{
13746 IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
13747 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
13748}
13749
13750
13751/** Opcode 0xd8 !11/6. */
13752FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
13753{
13754 IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
13755 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
13756}
13757
13758
13759/** Opcode 0xd8 !11/7. */
13760FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
13761{
13762 IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
13763 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
13764}
13765
13766
13767/** Opcode 0xd8. */
13768FNIEMOP_DEF(iemOp_EscF0)
13769{
13770 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13771 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
13772
13773 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13774 {
13775 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13776 {
13777 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
13778 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
13779 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
13780 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
13781 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
13782 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
13783 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
13784 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
13785 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13786 }
13787 }
13788 else
13789 {
13790 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13791 {
13792 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
13793 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
13794 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
13795 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
13796 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
13797 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
13798 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
13799 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
13800 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13801 }
13802 }
13803}
13804
13805
13806/** Opcode 0xd9 !11/0 mem32real
13807 * @sa iemOp_fld_m64r */
13808FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
13809{
13810 IEMOP_MNEMONIC(fld_m32r, "fld m32r");
13811
13812 IEM_MC_BEGIN(2, 3);
13813 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13814 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13815 IEM_MC_LOCAL(RTFLOAT32U, r32Val);
13816 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13817 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);
13818
13819 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13820 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13821
13822 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13823 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13824 IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13825
13826 IEM_MC_PREPARE_FPU_USAGE();
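    /* FLD pushes, so ST(7) (the register the push will land in) must be
       empty, otherwise the stack overflows. */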
13827 IEM_MC_IF_FPUREG_IS_EMPTY(7)
13828 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
13829 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13830 IEM_MC_ELSE()
13831 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13832 IEM_MC_ENDIF();
13833 IEM_MC_ADVANCE_RIP();
13834
13835 IEM_MC_END();
13836 return VINF_SUCCESS;
13837}
13838
13839
13840/** Opcode 0xd9 !11/2 mem32real */
13841FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
13842{
13843 IEMOP_MNEMONIC(fst_m32r, "fst m32r");
13844 IEM_MC_BEGIN(3, 2);
13845 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13846 IEM_MC_LOCAL(uint16_t, u16Fsw);
13847 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13848 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
13849 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
13850
13851 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13853 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13854 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13855
13856 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
13857 IEM_MC_PREPARE_FPU_USAGE();
13858 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13859 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
13860 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
13861 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13862 IEM_MC_ELSE()
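        /* Stack underflow: only write the default QNaN when the invalid
           operation exception is masked (FCW.IM). */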
13863 IEM_MC_IF_FCW_IM()
13864 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
13865 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
13866 IEM_MC_ENDIF();
13867 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13868 IEM_MC_ENDIF();
13869 IEM_MC_ADVANCE_RIP();
13870
13871 IEM_MC_END();
13872 return VINF_SUCCESS;
13873}
13874
13875
13876/** Opcode 0xd9 !11/3 */
13877FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
13878{
13879 IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
13880 IEM_MC_BEGIN(3, 2);
13881 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13882 IEM_MC_LOCAL(uint16_t, u16Fsw);
13883 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13884 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
13885 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
13886
13887 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13889 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13890 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13891
13892 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
13893 IEM_MC_PREPARE_FPU_USAGE();
13894 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13895 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
13896 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
13897 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13898 IEM_MC_ELSE()
13899 IEM_MC_IF_FCW_IM()
13900 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
13901 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
13902 IEM_MC_ENDIF();
13903 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13904 IEM_MC_ENDIF();
13905 IEM_MC_ADVANCE_RIP();
13906
13907 IEM_MC_END();
13908 return VINF_SUCCESS;
13909}
13910
13911
13912/** Opcode 0xd9 !11/4 */
13913FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
13914{
13915    IEMOP_MNEMONIC(fldenv, "fldenv m14/m28byte");
13916 IEM_MC_BEGIN(3, 0);
13917 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
13918 IEM_MC_ARG(uint8_t, iEffSeg, 1);
13919 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
13920 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13922 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13923 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13924 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
13925 IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
13926 IEM_MC_END();
13927 return VINF_SUCCESS;
13928}
13929
13930
13931/** Opcode 0xd9 !11/5 */
13932FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
13933{
13934 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
13935 IEM_MC_BEGIN(1, 1);
13936 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13937    IEM_MC_ARG(uint16_t, u16Fcw, 0);
13938 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13939 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13940 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13941 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13942    IEM_MC_FETCH_MEM_U16(u16Fcw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13943    IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fcw);
13944 IEM_MC_END();
13945 return VINF_SUCCESS;
13946}
13947
13948
13949/** Opcode 0xd9 !11/6 */
13950FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
13951{
13952    IEMOP_MNEMONIC(fnstenv, "fnstenv m14/m28byte");
13953 IEM_MC_BEGIN(3, 0);
13954 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
13955 IEM_MC_ARG(uint8_t, iEffSeg, 1);
13956 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
13957 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13959 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
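    /* Unlike FLDENV, this only reads the FPU state, hence the FOR_READ
       actualization. */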
13960 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
13961 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
13962 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
13963 IEM_MC_END();
13964 return VINF_SUCCESS;
13965}
13966
13967
13968/** Opcode 0xd9 !11/7 */
13969FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
13970{
13971 IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
13972 IEM_MC_BEGIN(2, 0);
13973 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13974 IEM_MC_LOCAL(uint16_t, u16Fcw);
13975 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13977 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13978 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
13979 IEM_MC_FETCH_FCW(u16Fcw);
13980 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
13981    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined; we leave them unmodified. */
13982 IEM_MC_END();
13983 return VINF_SUCCESS;
13984}
13985
13986
13987/** Opcode 0xd9 0xd0; possibly also 0xd9 0xd8-0xdf and more (needs verification). */
13988FNIEMOP_DEF(iemOp_fnop)
13989{
13990 IEMOP_MNEMONIC(fnop, "fnop");
13991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13992
13993 IEM_MC_BEGIN(0, 0);
13994 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13995 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13996 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13997 /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
13998     * Intel optimizations. Investigate. */
13999 IEM_MC_UPDATE_FPU_OPCODE_IP();
14000    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined; we leave them unmodified. */
14001 IEM_MC_END();
14002 return VINF_SUCCESS;
14003}
14004
14005
14006/** Opcode 0xd9 11/0 stN */
14007FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
14008{
14009 IEMOP_MNEMONIC(fld_stN, "fld stN");
14010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14011
14012    /** @todo Testcase: Check whether this raises \#MF; Intel's docs say it
14013     *        doesn't, while AMD's indicate that it does. */
14014 IEM_MC_BEGIN(0, 2);
14015 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
14016 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14017 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14018 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14019
14020 IEM_MC_PREPARE_FPU_USAGE();
14021 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
14022 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
14023 IEM_MC_PUSH_FPU_RESULT(FpuRes);
14024 IEM_MC_ELSE()
14025 IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
14026 IEM_MC_ENDIF();
14027
14028 IEM_MC_ADVANCE_RIP();
14029 IEM_MC_END();
14030
14031 return VINF_SUCCESS;
14032}
14033
14034
14035/** Opcode 0xd9 11/3 stN */
14036FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
14037{
14038 IEMOP_MNEMONIC(fxch_stN, "fxch stN");
14039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14040
14041    /** @todo Testcase: Check whether this raises \#MF; Intel's docs say it
14042     *        doesn't, while AMD's indicate that it does. */
14043 IEM_MC_BEGIN(1, 3);
14044 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
14045 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
14046 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14047 IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
14048 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14049 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14050
14051 IEM_MC_PREPARE_FPU_USAGE();
14052 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14053 IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
14054 IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
14055 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14056 IEM_MC_ELSE()
14057 IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
14058 IEM_MC_ENDIF();
14059
14060 IEM_MC_ADVANCE_RIP();
14061 IEM_MC_END();
14062
14063 return VINF_SUCCESS;
14064}
14065
14066
14067/** Opcode 0xd9 11/4, 0xdd 11/2. */
14068FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
14069{
14070 IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
14071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14072
14073    /* fstp st0, st0 is frequently used as a documented stand-in for the undocumented 'ffreep st0'. */
14074 uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
14075 if (!iDstReg)
14076 {
14077 IEM_MC_BEGIN(0, 1);
14078 IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
14079 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14080 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14081
14082 IEM_MC_PREPARE_FPU_USAGE();
14083 IEM_MC_IF_FPUREG_NOT_EMPTY(0)
14084 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
14085 IEM_MC_ELSE()
14086 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
14087 IEM_MC_ENDIF();
14088
14089 IEM_MC_ADVANCE_RIP();
14090 IEM_MC_END();
14091 }
14092 else
14093 {
14094 IEM_MC_BEGIN(0, 2);
14095 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
14096 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14097 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14098 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14099
14100 IEM_MC_PREPARE_FPU_USAGE();
14101 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14102 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
14103 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
14104 IEM_MC_ELSE()
14105 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
14106 IEM_MC_ENDIF();
14107
14108 IEM_MC_ADVANCE_RIP();
14109 IEM_MC_END();
14110 }
14111 return VINF_SUCCESS;
14112}
14113
14114
14115/**
14116 * Common worker for FPU instructions working on ST0, replacing it with the
14117 * result, i.e. unary operators.
14118 *
14119 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14120 */
14121FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
14122{
14123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14124
14125 IEM_MC_BEGIN(2, 1);
14126 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14127 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14128 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14129
14130 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14131 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14132 IEM_MC_PREPARE_FPU_USAGE();
14133 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14134 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
14135 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14136 IEM_MC_ELSE()
14137 IEM_MC_FPU_STACK_UNDERFLOW(0);
14138 IEM_MC_ENDIF();
14139 IEM_MC_ADVANCE_RIP();
14140
14141 IEM_MC_END();
14142 return VINF_SUCCESS;
14143}
14144
14145
14146/** Opcode 0xd9 0xe0. */
14147FNIEMOP_DEF(iemOp_fchs)
14148{
14149 IEMOP_MNEMONIC(fchs_st0, "fchs st0");
14150 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
14151}
14152
14153
14154/** Opcode 0xd9 0xe1. */
14155FNIEMOP_DEF(iemOp_fabs)
14156{
14157 IEMOP_MNEMONIC(fabs_st0, "fabs st0");
14158 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
14159}
14160
14161
14162/**
14163 * Common worker for FPU instructions working on ST0 and only returns FSW.
14164 *
14165 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14166 */
14167FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
14168{
14169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14170
14171 IEM_MC_BEGIN(2, 1);
14172 IEM_MC_LOCAL(uint16_t, u16Fsw);
14173 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14174 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14175
14176 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14177 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14178 IEM_MC_PREPARE_FPU_USAGE();
14179 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14180 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
14181 IEM_MC_UPDATE_FSW(u16Fsw);
14182 IEM_MC_ELSE()
14183 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
14184 IEM_MC_ENDIF();
14185 IEM_MC_ADVANCE_RIP();
14186
14187 IEM_MC_END();
14188 return VINF_SUCCESS;
14189}
14190
14191
14192/** Opcode 0xd9 0xe4. */
14193FNIEMOP_DEF(iemOp_ftst)
14194{
14195 IEMOP_MNEMONIC(ftst_st0, "ftst st0");
14196 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
14197}
14198
14199
14200/** Opcode 0xd9 0xe5. */
14201FNIEMOP_DEF(iemOp_fxam)
14202{
14203 IEMOP_MNEMONIC(fxam_st0, "fxam st0");
14204 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
14205}
14206
14207
14208/**
14209 * Common worker for FPU instructions pushing a constant onto the FPU stack.
14210 *
14211 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14212 */
14213FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
14214{
14215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14216
14217 IEM_MC_BEGIN(1, 1);
14218 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14219 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14220
14221 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14222 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14223 IEM_MC_PREPARE_FPU_USAGE();
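    /* Constant loads push like FLD, so ST(7) must be empty beforehand. */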
14224 IEM_MC_IF_FPUREG_IS_EMPTY(7)
14225 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
14226 IEM_MC_PUSH_FPU_RESULT(FpuRes);
14227 IEM_MC_ELSE()
14228 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
14229 IEM_MC_ENDIF();
14230 IEM_MC_ADVANCE_RIP();
14231
14232 IEM_MC_END();
14233 return VINF_SUCCESS;
14234}
14235
14236
14237/** Opcode 0xd9 0xe8. */
14238FNIEMOP_DEF(iemOp_fld1)
14239{
14240 IEMOP_MNEMONIC(fld1, "fld1");
14241 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
14242}
14243
14244
14245/** Opcode 0xd9 0xe9. */
14246FNIEMOP_DEF(iemOp_fldl2t)
14247{
14248 IEMOP_MNEMONIC(fldl2t, "fldl2t");
14249 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
14250}
14251
14252
14253/** Opcode 0xd9 0xea. */
14254FNIEMOP_DEF(iemOp_fldl2e)
14255{
14256 IEMOP_MNEMONIC(fldl2e, "fldl2e");
14257 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
14258}
14259
14260/** Opcode 0xd9 0xeb. */
14261FNIEMOP_DEF(iemOp_fldpi)
14262{
14263 IEMOP_MNEMONIC(fldpi, "fldpi");
14264 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
14265}
14266
14267
14268/** Opcode 0xd9 0xec. */
14269FNIEMOP_DEF(iemOp_fldlg2)
14270{
14271 IEMOP_MNEMONIC(fldlg2, "fldlg2");
14272 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
14273}
14274
14275/** Opcode 0xd9 0xed. */
14276FNIEMOP_DEF(iemOp_fldln2)
14277{
14278 IEMOP_MNEMONIC(fldln2, "fldln2");
14279 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
14280}
14281
14282
14283/** Opcode 0xd9 0xee. */
14284FNIEMOP_DEF(iemOp_fldz)
14285{
14286 IEMOP_MNEMONIC(fldz, "fldz");
14287 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
14288}
14289
14290
14291/** Opcode 0xd9 0xf0. */
14292FNIEMOP_DEF(iemOp_f2xm1)
14293{
14294 IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
14295 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
14296}
14297
14298
14299/**
14300 * Common worker for FPU instructions working on STn and ST0, storing the result
14301 * in STn, and popping the stack unless IE, DE or ZE was raised.
14302 *
14303 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14304 */
14305FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
14306{
14307 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14308
14309 IEM_MC_BEGIN(3, 1);
14310 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14311 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14312 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14313 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14314
14315 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14316 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14317
14318 IEM_MC_PREPARE_FPU_USAGE();
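    /* ST(i) is the first operand and the destination, ST0 the second; after
       the store the top of the stack (ST0) is popped. */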
14319 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
14320 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
14321 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
14322 IEM_MC_ELSE()
14323 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
14324 IEM_MC_ENDIF();
14325 IEM_MC_ADVANCE_RIP();
14326
14327 IEM_MC_END();
14328 return VINF_SUCCESS;
14329}
14330
14331
14332/** Opcode 0xd9 0xf1. */
14333FNIEMOP_DEF(iemOp_fyl2x)
14334{
14335    IEMOP_MNEMONIC(fyl2x_st1_st0, "fyl2x st1,st0");
14336 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
14337}
14338
14339
14340/**
14341 * Common worker for FPU instructions working on ST0 and having two outputs, one
14342 * replacing ST0 and one pushed onto the stack.
14343 *
14344 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14345 */
14346FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
14347{
14348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14349
14350 IEM_MC_BEGIN(2, 1);
14351 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
14352 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
14353 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14354
14355 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14356 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14357 IEM_MC_PREPARE_FPU_USAGE();
14358 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14359 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
14360 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
14361 IEM_MC_ELSE()
14362 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
14363 IEM_MC_ENDIF();
14364 IEM_MC_ADVANCE_RIP();
14365
14366 IEM_MC_END();
14367 return VINF_SUCCESS;
14368}
14369
14370
14371/** Opcode 0xd9 0xf2. */
14372FNIEMOP_DEF(iemOp_fptan)
14373{
14374 IEMOP_MNEMONIC(fptan_st0, "fptan st0");
14375 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
14376}
14377
14378
14379/** Opcode 0xd9 0xf3. */
14380FNIEMOP_DEF(iemOp_fpatan)
14381{
14382 IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
14383 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
14384}
14385
14386
14387/** Opcode 0xd9 0xf4. */
14388FNIEMOP_DEF(iemOp_fxtract)
14389{
14390 IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
14391 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
14392}
14393
14394
14395/** Opcode 0xd9 0xf5. */
14396FNIEMOP_DEF(iemOp_fprem1)
14397{
14398 IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
14399 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
14400}
14401
14402
14403/** Opcode 0xd9 0xf6. */
14404FNIEMOP_DEF(iemOp_fdecstp)
14405{
14406 IEMOP_MNEMONIC(fdecstp, "fdecstp");
14407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14408    /* Note! C0, C2 and C3 are documented as undefined; we clear them. */
14409 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
14410 * FINCSTP and FDECSTP. */
14411
14412 IEM_MC_BEGIN(0,0);
14413
14414 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14415 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14416
14417 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
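    /* Only the TOP field in FSW rotates; the tag word and the data registers
       are left untouched. */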
14418 IEM_MC_FPU_STACK_DEC_TOP();
14419 IEM_MC_UPDATE_FSW_CONST(0);
14420
14421 IEM_MC_ADVANCE_RIP();
14422 IEM_MC_END();
14423 return VINF_SUCCESS;
14424}
14425
14426
14427/** Opcode 0xd9 0xf7. */
14428FNIEMOP_DEF(iemOp_fincstp)
14429{
14430 IEMOP_MNEMONIC(fincstp, "fincstp");
14431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14432    /* Note! C0, C2 and C3 are documented as undefined; we clear them. */
14433 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
14434 * FINCSTP and FDECSTP. */
14435
14436 IEM_MC_BEGIN(0,0);
14437
14438 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14439 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14440
14441 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14442 IEM_MC_FPU_STACK_INC_TOP();
14443 IEM_MC_UPDATE_FSW_CONST(0);
14444
14445 IEM_MC_ADVANCE_RIP();
14446 IEM_MC_END();
14447 return VINF_SUCCESS;
14448}
14449
14450
14451/** Opcode 0xd9 0xf8. */
14452FNIEMOP_DEF(iemOp_fprem)
14453{
14454 IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
14455 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
14456}
14457
14458
14459/** Opcode 0xd9 0xf9. */
14460FNIEMOP_DEF(iemOp_fyl2xp1)
14461{
14462 IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
14463 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
14464}
14465
14466
14467/** Opcode 0xd9 0xfa. */
14468FNIEMOP_DEF(iemOp_fsqrt)
14469{
14470 IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
14471 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
14472}
14473
14474
14475/** Opcode 0xd9 0xfb. */
14476FNIEMOP_DEF(iemOp_fsincos)
14477{
14478 IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
14479 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
14480}
14481
14482
14483/** Opcode 0xd9 0xfc. */
14484FNIEMOP_DEF(iemOp_frndint)
14485{
14486 IEMOP_MNEMONIC(frndint_st0, "frndint st0");
14487 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
14488}
14489
14490
14491/** Opcode 0xd9 0xfd. */
14492FNIEMOP_DEF(iemOp_fscale)
14493{
14494 IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
14495 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
14496}
14497
14498
14499/** Opcode 0xd9 0xfe. */
14500FNIEMOP_DEF(iemOp_fsin)
14501{
14502 IEMOP_MNEMONIC(fsin_st0, "fsin st0");
14503 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
14504}
14505
14506
14507/** Opcode 0xd9 0xff. */
14508FNIEMOP_DEF(iemOp_fcos)
14509{
14510 IEMOP_MNEMONIC(fcos_st0, "fcos st0");
14511 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
14512}
14513
14514
14515/** Used by iemOp_EscF1. */
14516IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
14517{
14518 /* 0xe0 */ iemOp_fchs,
14519 /* 0xe1 */ iemOp_fabs,
14520 /* 0xe2 */ iemOp_Invalid,
14521 /* 0xe3 */ iemOp_Invalid,
14522 /* 0xe4 */ iemOp_ftst,
14523 /* 0xe5 */ iemOp_fxam,
14524 /* 0xe6 */ iemOp_Invalid,
14525 /* 0xe7 */ iemOp_Invalid,
14526 /* 0xe8 */ iemOp_fld1,
14527 /* 0xe9 */ iemOp_fldl2t,
14528 /* 0xea */ iemOp_fldl2e,
14529 /* 0xeb */ iemOp_fldpi,
14530 /* 0xec */ iemOp_fldlg2,
14531 /* 0xed */ iemOp_fldln2,
14532 /* 0xee */ iemOp_fldz,
14533 /* 0xef */ iemOp_Invalid,
14534 /* 0xf0 */ iemOp_f2xm1,
14535 /* 0xf1 */ iemOp_fyl2x,
14536 /* 0xf2 */ iemOp_fptan,
14537 /* 0xf3 */ iemOp_fpatan,
14538 /* 0xf4 */ iemOp_fxtract,
14539 /* 0xf5 */ iemOp_fprem1,
14540 /* 0xf6 */ iemOp_fdecstp,
14541 /* 0xf7 */ iemOp_fincstp,
14542 /* 0xf8 */ iemOp_fprem,
14543 /* 0xf9 */ iemOp_fyl2xp1,
14544 /* 0xfa */ iemOp_fsqrt,
14545 /* 0xfb */ iemOp_fsincos,
14546 /* 0xfc */ iemOp_frndint,
14547 /* 0xfd */ iemOp_fscale,
14548 /* 0xfe */ iemOp_fsin,
14549 /* 0xff */ iemOp_fcos
14550};
14551
14552
14553/** Opcode 0xd9. */
14554FNIEMOP_DEF(iemOp_EscF1)
14555{
14556 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14557 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
14558
14559 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14560 {
14561 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14562 {
14563 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
14564 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
14565 case 2:
14566 if (bRm == 0xd0)
14567 return FNIEMOP_CALL(iemOp_fnop);
14568 return IEMOP_RAISE_INVALID_OPCODE();
14569 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
14570 case 4:
14571 case 5:
14572 case 6:
14573 case 7:
14574 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
14575 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
14576 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14577 }
14578 }
14579 else
14580 {
14581 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14582 {
14583 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
14584 case 1: return IEMOP_RAISE_INVALID_OPCODE();
14585 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
14586 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
14587 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
14588 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
14589 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
14590 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
14591 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14592 }
14593 }
14594}
14595
14596
14597/** Opcode 0xda 11/0. */
14598FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
14599{
14600 IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
14601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14602
14603 IEM_MC_BEGIN(0, 1);
14604 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14605
14606 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14607 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14608
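    /* FCMOVB tests CF in the integer EFLAGS, not the x87 condition codes. */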
14609 IEM_MC_PREPARE_FPU_USAGE();
14610 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14611 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
14612 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14613 IEM_MC_ENDIF();
14614 IEM_MC_UPDATE_FPU_OPCODE_IP();
14615 IEM_MC_ELSE()
14616 IEM_MC_FPU_STACK_UNDERFLOW(0);
14617 IEM_MC_ENDIF();
14618 IEM_MC_ADVANCE_RIP();
14619
14620 IEM_MC_END();
14621 return VINF_SUCCESS;
14622}
14623
14624
14625/** Opcode 0xda 11/1. */
14626FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
14627{
14628 IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
14629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14630
14631 IEM_MC_BEGIN(0, 1);
14632 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14633
14634 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14635 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14636
14637 IEM_MC_PREPARE_FPU_USAGE();
14638 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14639 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
14640 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14641 IEM_MC_ENDIF();
14642 IEM_MC_UPDATE_FPU_OPCODE_IP();
14643 IEM_MC_ELSE()
14644 IEM_MC_FPU_STACK_UNDERFLOW(0);
14645 IEM_MC_ENDIF();
14646 IEM_MC_ADVANCE_RIP();
14647
14648 IEM_MC_END();
14649 return VINF_SUCCESS;
14650}
14651
14652
14653/** Opcode 0xda 11/2. */
14654FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
14655{
14656 IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
14657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14658
14659 IEM_MC_BEGIN(0, 1);
14660 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14661
14662 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14663 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14664
14665 IEM_MC_PREPARE_FPU_USAGE();
14666 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14667 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
14668 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14669 IEM_MC_ENDIF();
14670 IEM_MC_UPDATE_FPU_OPCODE_IP();
14671 IEM_MC_ELSE()
14672 IEM_MC_FPU_STACK_UNDERFLOW(0);
14673 IEM_MC_ENDIF();
14674 IEM_MC_ADVANCE_RIP();
14675
14676 IEM_MC_END();
14677 return VINF_SUCCESS;
14678}
14679
14680
14681/** Opcode 0xda 11/3. */
14682FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
14683{
14684 IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
14685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14686
14687 IEM_MC_BEGIN(0, 1);
14688 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14689
14690 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14691 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14692
14693 IEM_MC_PREPARE_FPU_USAGE();
14694 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14695 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
14696 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14697 IEM_MC_ENDIF();
14698 IEM_MC_UPDATE_FPU_OPCODE_IP();
14699 IEM_MC_ELSE()
14700 IEM_MC_FPU_STACK_UNDERFLOW(0);
14701 IEM_MC_ENDIF();
14702 IEM_MC_ADVANCE_RIP();
14703
14704 IEM_MC_END();
14705 return VINF_SUCCESS;
14706}
14707
14708
14709/**
14710 * Common worker for FPU instructions working on ST0 and ST1, only affecting
14711 * flags, and popping twice when done.
14712 *
14713 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14714 */
14715FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14716{
14717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14718
14719 IEM_MC_BEGIN(3, 1);
14720 IEM_MC_LOCAL(uint16_t, u16Fsw);
14721 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14722 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14723 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14724
14725 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14726 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14727
14728 IEM_MC_PREPARE_FPU_USAGE();
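    /* The comparison is always ST0 vs ST1, and both registers are popped
       whichever branch is taken. */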
14729 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
14730 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14731 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
14732 IEM_MC_ELSE()
14733 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
14734 IEM_MC_ENDIF();
14735 IEM_MC_ADVANCE_RIP();
14736
14737 IEM_MC_END();
14738 return VINF_SUCCESS;
14739}
14740
14741
14742/** Opcode 0xda 0xe9. */
14743FNIEMOP_DEF(iemOp_fucompp)
14744{
14745    IEMOP_MNEMONIC(fucompp_st0_st1, "fucompp st0,st1");
14746 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
14747}
14748
14749
14750/**
14751 * Common worker for FPU instructions working on ST0 and an m32i, and storing
14752 * the result in ST0.
14753 *
14754 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14755 */
14756FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
14757{
14758 IEM_MC_BEGIN(3, 3);
14759 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14760 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14761 IEM_MC_LOCAL(int32_t, i32Val2);
14762 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14763 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14764 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
14765
14766 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14768
14769 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14770 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14771 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14772
14773 IEM_MC_PREPARE_FPU_USAGE();
14774 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14775 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
14776 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14777 IEM_MC_ELSE()
14778 IEM_MC_FPU_STACK_UNDERFLOW(0);
14779 IEM_MC_ENDIF();
14780 IEM_MC_ADVANCE_RIP();
14781
14782 IEM_MC_END();
14783 return VINF_SUCCESS;
14784}
14785
14786
14787/** Opcode 0xda !11/0. */
14788FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
14789{
14790 IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
14791 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
14792}
14793
14794
14795/** Opcode 0xda !11/1. */
14796FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
14797{
14798 IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
14799 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
14800}
14801
14802
14803/** Opcode 0xda !11/2. */
14804FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
14805{
14806 IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");
14807
14808 IEM_MC_BEGIN(3, 3);
14809 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14810 IEM_MC_LOCAL(uint16_t, u16Fsw);
14811 IEM_MC_LOCAL(int32_t, i32Val2);
14812 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14813 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14814 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
14815
14816 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14818
14819 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14820 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14821 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14822
14823 IEM_MC_PREPARE_FPU_USAGE();
14824 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14825 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
14826 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14827 IEM_MC_ELSE()
14828 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14829 IEM_MC_ENDIF();
14830 IEM_MC_ADVANCE_RIP();
14831
14832 IEM_MC_END();
14833 return VINF_SUCCESS;
14834}
14835
14836
14837/** Opcode 0xda !11/3. */
14838FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
14839{
14840 IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");
14841
14842 IEM_MC_BEGIN(3, 3);
14843 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14844 IEM_MC_LOCAL(uint16_t, u16Fsw);
14845 IEM_MC_LOCAL(int32_t, i32Val2);
14846 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14847 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14848 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
14849
14850 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14852
14853 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14854 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14855 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14856
14857 IEM_MC_PREPARE_FPU_USAGE();
14858 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14859 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
14860 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14861 IEM_MC_ELSE()
14862 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14863 IEM_MC_ENDIF();
14864 IEM_MC_ADVANCE_RIP();
14865
14866 IEM_MC_END();
14867 return VINF_SUCCESS;
14868}
14869
14870
14871/** Opcode 0xda !11/4. */
14872FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
14873{
14874 IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
14875 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
14876}
14877
14878
14879/** Opcode 0xda !11/5. */
14880FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
14881{
14882 IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
14883 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
14884}
14885
14886
14887/** Opcode 0xda !11/6. */
14888FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
14889{
14890 IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
14891 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
14892}
14893
14894
14895/** Opcode 0xda !11/7. */
14896FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
14897{
14898 IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
14899 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
14900}
14901
14902
14903/** Opcode 0xda. */
14904FNIEMOP_DEF(iemOp_EscF2)
14905{
14906 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14907 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
14908 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14909 {
14910 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14911 {
14912 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
14913 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
14914 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
14915 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
14916 case 4: return IEMOP_RAISE_INVALID_OPCODE();
14917 case 5:
14918 if (bRm == 0xe9)
14919 return FNIEMOP_CALL(iemOp_fucompp);
14920 return IEMOP_RAISE_INVALID_OPCODE();
14921 case 6: return IEMOP_RAISE_INVALID_OPCODE();
14922 case 7: return IEMOP_RAISE_INVALID_OPCODE();
14923 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14924 }
14925 }
14926 else
14927 {
14928 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14929 {
14930 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
14931 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
14932 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
14933 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
14934 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
14935 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
14936 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
14937 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
14938 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14939 }
14940 }
14941}
14942
14943
14944/** Opcode 0xdb !11/0. */
14945FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
14946{
14947 IEMOP_MNEMONIC(fild_m32i, "fild m32i");
14948
14949 IEM_MC_BEGIN(2, 3);
14950 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14951 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14952 IEM_MC_LOCAL(int32_t, i32Val);
14953 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14954 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);
14955
14956 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14958
14959 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14960 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14961 IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14962
14963 IEM_MC_PREPARE_FPU_USAGE();
14964 IEM_MC_IF_FPUREG_IS_EMPTY(7)
14965 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
14966 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14967 IEM_MC_ELSE()
14968 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14969 IEM_MC_ENDIF();
14970 IEM_MC_ADVANCE_RIP();
14971
14972 IEM_MC_END();
14973 return VINF_SUCCESS;
14974}
14975
14976
14977/** Opcode 0xdb !11/1. */
14978FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
14979{
14980 IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
14981 IEM_MC_BEGIN(3, 2);
14982 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14983 IEM_MC_LOCAL(uint16_t, u16Fsw);
14984 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14985 IEM_MC_ARG(int32_t *, pi32Dst, 1);
14986 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14987
14988 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14990 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14991 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14992
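    /* Map the destination before preparing FPU usage, so a bad address
       faults without touching the FPU state. */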
14993 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
14994 IEM_MC_PREPARE_FPU_USAGE();
14995 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14996 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
14997 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14998 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14999 IEM_MC_ELSE()
15000 IEM_MC_IF_FCW_IM()
15001 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
15002 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
15003 IEM_MC_ENDIF();
15004 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15005 IEM_MC_ENDIF();
15006 IEM_MC_ADVANCE_RIP();
15007
15008 IEM_MC_END();
15009 return VINF_SUCCESS;
15010}
15011
15012
15013/** Opcode 0xdb !11/2. */
15014FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
15015{
15016 IEMOP_MNEMONIC(fist_m32i, "fist m32i");
15017 IEM_MC_BEGIN(3, 2);
15018 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15019 IEM_MC_LOCAL(uint16_t, u16Fsw);
15020 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15021 IEM_MC_ARG(int32_t *, pi32Dst, 1);
15022 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15023
15024 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15026 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15027 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15028
15029 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15030 IEM_MC_PREPARE_FPU_USAGE();
15031 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15032 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
15033 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
15034 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15035 IEM_MC_ELSE()
15036 IEM_MC_IF_FCW_IM()
15037 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
15038 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
15039 IEM_MC_ENDIF();
15040 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15041 IEM_MC_ENDIF();
15042 IEM_MC_ADVANCE_RIP();
15043
15044 IEM_MC_END();
15045 return VINF_SUCCESS;
15046}
15047
15048
15049/** Opcode 0xdb !11/3. */
15050FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
15051{
15052 IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
15053 IEM_MC_BEGIN(3, 2);
15054 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15055 IEM_MC_LOCAL(uint16_t, u16Fsw);
15056 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15057 IEM_MC_ARG(int32_t *, pi32Dst, 1);
15058 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15059
15060 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15062 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15063 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15064
15065 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15066 IEM_MC_PREPARE_FPU_USAGE();
15067 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15068 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
15069 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
15070 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15071 IEM_MC_ELSE()
15072 IEM_MC_IF_FCW_IM()
15073 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
15074 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
15075 IEM_MC_ENDIF();
15076 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15077 IEM_MC_ENDIF();
15078 IEM_MC_ADVANCE_RIP();
15079
15080 IEM_MC_END();
15081 return VINF_SUCCESS;
15082}
15083
15084
15085/** Opcode 0xdb !11/5. */
15086FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
15087{
15088 IEMOP_MNEMONIC(fld_m80r, "fld m80r");
15089
15090 IEM_MC_BEGIN(2, 3);
15091 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15092 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15093 IEM_MC_LOCAL(RTFLOAT80U, r80Val);
15094 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15095 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);
15096
15097 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15099
15100 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15101 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15102 IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15103
15104 IEM_MC_PREPARE_FPU_USAGE();
15105 IEM_MC_IF_FPUREG_IS_EMPTY(7)
15106 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
15107 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15108 IEM_MC_ELSE()
15109 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15110 IEM_MC_ENDIF();
15111 IEM_MC_ADVANCE_RIP();
15112
15113 IEM_MC_END();
15114 return VINF_SUCCESS;
15115}
15116
15117
15118/** Opcode 0xdb !11/7. */
15119FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
15120{
15121 IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
15122 IEM_MC_BEGIN(3, 2);
15123 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15124 IEM_MC_LOCAL(uint16_t, u16Fsw);
15125 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15126 IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
15127 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15128
15129 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15131 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15132 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15133
15134 IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15135 IEM_MC_PREPARE_FPU_USAGE();
15136 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15137 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
15138 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
15139 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15140 IEM_MC_ELSE()
15141 IEM_MC_IF_FCW_IM()
15142 IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
15143 IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
15144 IEM_MC_ENDIF();
15145 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15146 IEM_MC_ENDIF();
15147 IEM_MC_ADVANCE_RIP();
15148
15149 IEM_MC_END();
15150 return VINF_SUCCESS;
15151}
15152
15153
15154/** Opcode 0xdb 11/0. */
15155FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
15156{
15157 IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
15158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15159
15160 IEM_MC_BEGIN(0, 1);
15161 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15162
15163 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15164 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15165
15166 IEM_MC_PREPARE_FPU_USAGE();
15167 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15168 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
15169 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15170 IEM_MC_ENDIF();
15171 IEM_MC_UPDATE_FPU_OPCODE_IP();
15172 IEM_MC_ELSE()
15173 IEM_MC_FPU_STACK_UNDERFLOW(0);
15174 IEM_MC_ENDIF();
15175 IEM_MC_ADVANCE_RIP();
15176
15177 IEM_MC_END();
15178 return VINF_SUCCESS;
15179}
15180
15181
15182/** Opcode 0xdb 11/1. */
15183FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
15184{
15185 IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
15186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15187
15188 IEM_MC_BEGIN(0, 1);
15189 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15190
15191 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15192 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15193
15194 IEM_MC_PREPARE_FPU_USAGE();
15195 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15196 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
15197 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15198 IEM_MC_ENDIF();
15199 IEM_MC_UPDATE_FPU_OPCODE_IP();
15200 IEM_MC_ELSE()
15201 IEM_MC_FPU_STACK_UNDERFLOW(0);
15202 IEM_MC_ENDIF();
15203 IEM_MC_ADVANCE_RIP();
15204
15205 IEM_MC_END();
15206 return VINF_SUCCESS;
15207}
15208
15209
15210/** Opcode 0xdb 11/2. */
15211FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
15212{
15213 IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
15214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15215
15216 IEM_MC_BEGIN(0, 1);
15217 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15218
15219 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15220 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15221
15222 IEM_MC_PREPARE_FPU_USAGE();
15223 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15224 IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
15225 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15226 IEM_MC_ENDIF();
15227 IEM_MC_UPDATE_FPU_OPCODE_IP();
15228 IEM_MC_ELSE()
15229 IEM_MC_FPU_STACK_UNDERFLOW(0);
15230 IEM_MC_ENDIF();
15231 IEM_MC_ADVANCE_RIP();
15232
15233 IEM_MC_END();
15234 return VINF_SUCCESS;
15235}
15236
15237
15238/** Opcode 0xdb 11/3. */
15239FNIEMOP_DEF_1(iemOp_fcmovnu_stN, uint8_t, bRm)
15240{
15241    IEMOP_MNEMONIC(fcmovnu_st0_stN, "fcmovnu st0,stN");
15242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15243
15244 IEM_MC_BEGIN(0, 1);
15245 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15246
15247 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15248 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15249
15250 IEM_MC_PREPARE_FPU_USAGE();
15251 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15252 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
15253 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15254 IEM_MC_ENDIF();
15255 IEM_MC_UPDATE_FPU_OPCODE_IP();
15256 IEM_MC_ELSE()
15257 IEM_MC_FPU_STACK_UNDERFLOW(0);
15258 IEM_MC_ENDIF();
15259 IEM_MC_ADVANCE_RIP();
15260
15261 IEM_MC_END();
15262 return VINF_SUCCESS;
15263}
15264
15265
15266/** Opcode 0xdb 0xe0. */
15267FNIEMOP_DEF(iemOp_fneni)
15268{
15269 IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
15270 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15271 IEM_MC_BEGIN(0,0);
15272 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15273 IEM_MC_ADVANCE_RIP();
15274 IEM_MC_END();
15275 return VINF_SUCCESS;
15276}
15277
15278
15279/** Opcode 0xdb 0xe1. */
15280FNIEMOP_DEF(iemOp_fndisi)
15281{
15282 IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
15283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15284 IEM_MC_BEGIN(0,0);
15285 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15286 IEM_MC_ADVANCE_RIP();
15287 IEM_MC_END();
15288 return VINF_SUCCESS;
15289}
15290
15291
15292/** Opcode 0xdb 0xe2. */
15293FNIEMOP_DEF(iemOp_fnclex)
15294{
15295 IEMOP_MNEMONIC(fnclex, "fnclex");
15296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15297
15298 IEM_MC_BEGIN(0,0);
15299 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15300 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15301 IEM_MC_CLEAR_FSW_EX();
15302 IEM_MC_ADVANCE_RIP();
15303 IEM_MC_END();
15304 return VINF_SUCCESS;
15305}
15306
15307
15308/** Opcode 0xdb 0xe3. */
15309FNIEMOP_DEF(iemOp_fninit)
15310{
15311 IEMOP_MNEMONIC(fninit, "fninit");
15312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15313 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
15314}
15315
15316
15317/** Opcode 0xdb 0xe4. */
15318FNIEMOP_DEF(iemOp_fnsetpm)
15319{
15320 IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
15321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15322 IEM_MC_BEGIN(0,0);
15323 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15324 IEM_MC_ADVANCE_RIP();
15325 IEM_MC_END();
15326 return VINF_SUCCESS;
15327}
15328
15329
15330/** Opcode 0xdb 0xe5. */
15331FNIEMOP_DEF(iemOp_frstpm)
15332{
15333 IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
15334#if 0 /* #UDs on newer CPUs */
15335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15336 IEM_MC_BEGIN(0,0);
15337 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15338 IEM_MC_ADVANCE_RIP();
15339 IEM_MC_END();
15340 return VINF_SUCCESS;
15341#else
15342 return IEMOP_RAISE_INVALID_OPCODE();
15343#endif
15344}
15345
15346
15347/** Opcode 0xdb 11/5. */
15348FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
15349{
15350 IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
15351 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
15352}
15353
15354
15355/** Opcode 0xdb 11/6. */
15356FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
15357{
15358 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
15359 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
15360}
15361
15362
15363/** Opcode 0xdb. */
15364FNIEMOP_DEF(iemOp_EscF3)
15365{
15366 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15367 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
15368 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15369 {
15370 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15371 {
15372 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
15373 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
15374 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
15375            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnu_stN, bRm);
15376 case 4:
15377 switch (bRm)
15378 {
15379 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
15380 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
15381 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
15382 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
15383 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
15384 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
15385 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
15386 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
15387 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15388 }
15389 break;
15390 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
15391 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
15392 case 7: return IEMOP_RAISE_INVALID_OPCODE();
15393 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15394 }
15395 }
15396 else
15397 {
15398 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15399 {
15400 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
15401            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i, bRm);
15402 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
15403 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
15404 case 4: return IEMOP_RAISE_INVALID_OPCODE();
15405 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
15406 case 6: return IEMOP_RAISE_INVALID_OPCODE();
15407 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
15408 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15409 }
15410 }
15411}
15412
15413
15414/**
15415 * Common worker for FPU instructions working on STn and ST0, and storing the
15416 * result in STn unless IE, DE or ZE was raised.
15417 *
15418 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15419 */
15420FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
15421{
15422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15423
15424 IEM_MC_BEGIN(3, 1);
15425 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15426 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15427 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15428 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
15429
15430 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15431 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15432
15433 IEM_MC_PREPARE_FPU_USAGE();
15434 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
15435 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
15436 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
15437 IEM_MC_ELSE()
15438 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
15439 IEM_MC_ENDIF();
15440 IEM_MC_ADVANCE_RIP();
15441
15442 IEM_MC_END();
15443 return VINF_SUCCESS;
15444}
15445
15446
15447/** Opcode 0xdc 11/0. */
15448FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
15449{
15450 IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
15451 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
15452}
15453
15454
15455/** Opcode 0xdc 11/1. */
15456FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
15457{
15458 IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
15459 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
15460}
15461
15462
15463/** Opcode 0xdc 11/4. */
15464FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
15465{
15466 IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
15467 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
15468}
15469
15470
15471/** Opcode 0xdc 11/5. */
15472FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
15473{
15474 IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
15475 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
15476}
15477
15478
15479/** Opcode 0xdc 11/6. */
15480FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
15481{
15482 IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
15483 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
15484}
15485
15486
15487/** Opcode 0xdc 11/7. */
15488FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
15489{
15490 IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
15491 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
15492}
15493
15494
15495/**
15496 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
15497 * memory operand, and storing the result in ST0.
15498 *
15499 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15500 */
15501FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnAImpl)
15502{
15503 IEM_MC_BEGIN(3, 3);
15504 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15505 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15506 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
15507 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15508 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
15509 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
15510
15511 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15513 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15514 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15515
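    /* Fetch the m64r operand up front so any memory fault is raised before
       the FPU state is modified. */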
15516 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15517 IEM_MC_PREPARE_FPU_USAGE();
15518 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
15519        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Factor1, pr64Factor2);
15520 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15521 IEM_MC_ELSE()
15522 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15523 IEM_MC_ENDIF();
15524 IEM_MC_ADVANCE_RIP();
15525
15526 IEM_MC_END();
15527 return VINF_SUCCESS;
15528}


/** Opcode 0xdc !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}


/** Opcode 0xdc !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}


/** Opcode 0xdc !11/2. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdc !11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdc !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}


/** Opcode 0xdc !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}


/** Opcode 0xdc !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}


/** Opcode 0xdc !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}


/** Opcode 0xdc. */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
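    /* Note! The FPU keeps an 11-bit opcode in FOP: the low three bits of the
       escape byte (0xdc here) in the high byte and the ModRM byte below. */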
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0xdd !11/0.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdd !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
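        /* ST0 is empty: store the 64-bit integer indefinite (INT64_MIN) when
           #IE is masked and record the stack underflow. */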
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdd !11/2. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdd !11/3. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdd !11/4. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdd !11/6. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdd !11/7. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdd 11/0. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
       unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdd 11/2. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdd 11/4. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}


/** Opcode 0xdd 11/5. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}


/** Opcode 0xdd. */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0xde 11/0. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xde 11/1. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xde 0xd9. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xde 11/4. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xde 11/5. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xde 11/6. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xde 11/7. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}


/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xde !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}


/** Opcode 0xde !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}


/** Opcode 0xde !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xde !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xde !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}


/** Opcode 0xde !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}


/** Opcode 0xde !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}


/** Opcode 0xde !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}


/** Opcode 0xde. */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
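            /* /3 is only defined for ModRM byte 0xd9 (mod=3, reg=3, rm=1): FCOMPP. */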
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdf 0xe0. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdf 11/5. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}


/** Opcode 0xdf 11/6. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}


/** Opcode 0xdf !11/0. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdf !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdf !11/2. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdf !11/3. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdf !11/4. */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);


/** Opcode 0xdf !11/5. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdf !11/6. */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);


/** Opcode 0xdf !11/7. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdf. */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
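            /* /4 is only defined for ModRM byte 0xe0 (mod=3, reg=4, rm=0): FNSTSW AX. */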
            case 4: if (bRm == 0xe0)
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0xe0. */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}


/** Opcode 0xe1. */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}


/** Opcode 0xe2. */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
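    /* A LOOP whose displacement equals minus its own instruction length
       branches to itself and would merely spin the counter down to zero, so
       detect that case and clear CX/ECX/RCX in one go instead of iterating. */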
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}


/** Opcode 0xe3. */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
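
    /* Note! Unlike LOOPcc this does not decrement the counter and never looks
       at EFLAGS; the branch is taken when the (address-size sized) count
       register is zero, hence the inverted IF bodies below. */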
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}


/** Opcode 0xe4 */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
}


/** Opcode 0xe5 */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}


/** Opcode 0xe6 */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}


/** Opcode 0xe7 */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}


/** Opcode 0xe8. */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }
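
        /* In 64-bit mode the Jv immediate is only 32 bits wide and is
           sign-extended to form the 64-bit relative displacement. */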
        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}


/** Opcode 0xe9. */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}


/** Opcode 0xea. */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}


/** Opcode 0xeb. */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xec */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}


/** Opcode 0xed */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}


/** Opcode 0xee */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}


/** Opcode 0xef */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}


/** Opcode 0xf0. */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0xf1. */
FNIEMOP_DEF(iemOp_int_1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}


/** Opcode 0xf2. */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0xf3. */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0xf4. */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}


/** Opcode 0xf5. */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/**
 * Common implementation of 'inc/dec/not/neg Eb'.
 *
 * @param bRm The RM byte.
 * @param pImpl The instruction implementation.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * @param bRm The RM byte.
 * @param pImpl The instruction implementation.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory we do here. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}


/** Opcode 0xf6 /0. */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
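
        /* Note! The last argument of IEM_MC_CALC_RM_EFF_ADDR is the number of
           immediate bytes still to be fetched, so RIP-relative addressing in
           64-bit mode accounts for the whole instruction length. */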
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
17183
17184
17185/** Opcode 0xf7 /0. */
17186FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
17187{
17188 IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
17189 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
17190
17191 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17192 {
17193 /* register access */
17194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17195 switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

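            /* There is no 64-bit immediate form; as with most instructions
               the 32-bit immediate is sign-extended to 64 bits (S32_SX_U64). */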
            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0xf6 /4, /5, /6 and /7. */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
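        /* The assembly worker returns zero on success; a non-zero return
           (only possible for div/idiv) means #DE must be raised. */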
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0xf7 /4, /5, /6 and /7. */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

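    /* MUL, IMUL, DIV and IDIV leave SF, ZF, AF and PF undefined, which is
       why the verifier is told to ignore them above. */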
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
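                    /* A 32-bit GPR write clears bits 63:32, but the worker
                       stored through pointers, so clear the high halves here. */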
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0xf6. */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
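    /* The ModR/M reg field selects which group-3 sub-instruction this is. */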
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Eb, "not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Eb, "neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}


/** Opcode 0xf7. */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Ev, "not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Ev, "neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}


/** Opcode 0xf8. */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xf9. */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xfa. */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
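    /* Changing IF is subject to privilege (IOPL/VME) checks, so defer to
       the C implementation. */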
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}


/** Opcode 0xfb. */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}


/** Opcode 0xfc. */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xfd. */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xfe. */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Eb, "inc Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Eb, "dec Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
        default:
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}


/**
 * Opcode 0xff /2.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
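    /* Near branches default to a 64-bit operand size in long mode, so no
       REX.W prefix is needed for a 64-bit call target. */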

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}

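/** Far branch (callf/jmpf) C worker prototype, shared by the 0xff /3 and /5
 *  decoders below. */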
typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);

FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
{
    /* Registers? How?? */
    if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
    { /* likely */ }
    else
        return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */

    /* Far pointer loaded from memory. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t, u16Sel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
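            /* The m16:16 far pointer is stored offset first, selector last,
               so the selector word is fetched at displacement 2. */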
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
             *        and will apparently ignore REX.W, at least for the jmp far qword [rsp]
             *        and call far qword [rsp] encodings. */
            if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t, u16Sel, 0);
                IEM_MC_ARG(uint64_t, offSeg, 1);
                IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
                IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
                IEM_MC_END();
                return VINF_SUCCESS;
            }
            /* AMD falls thru. */

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t, u16Sel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}


/**
 * Opcode 0xff /3.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}


/**
 * Opcode 0xff /4.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/**
 * Opcode 0xff /5.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}


/**
 * Opcode 0xff /6.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
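    /* In 64-bit mode pushes default to a 64-bit operand size and a 32-bit
       push is not encodable; the 32-bit case below serves the other modes. */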
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}


/** Opcode 0xff. */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Ev, "inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Ev, "dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    AssertFailedReturn(VERR_IEM_IPE_3);
}


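/** The one-byte opcode decoder dispatch table, indexed by the opcode byte. */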
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};


/** @} */

#ifdef _MSC_VER
# pragma warning(pop)
#endif