/* $Id: IEMAllInstructions.cpp.h 64545 2016-11-04 01:58:05Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 */

/*
 * Copyright (C) 2011-2016 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */

#ifdef _MSC_VER
# pragma warning(push)
# pragma warning(disable: 4702) /* Unreachable code like return in iemOp_Grp6_lldt. */
#endif


/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,  pu8Dst,           0);
        IEM_MC_ARG(uint8_t,    u8Src,            1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        if (!pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
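

/*
 * A quick standalone sketch (not part of the build) of the ModR/M test used
 * by all the workers in this file: the byte splits into mod (bits 7:6),
 * reg (bits 5:3) and rm (bits 2:0), mirroring the X86_MODRM_*_MASK/SHIFT
 * constants, and mod == 3 selects the register form.
 *
 * @code
 *  #include <stdint.h>
 *  #include <stdio.h>
 *
 *  int main(void)
 *  {
 *      uint8_t const  bRm  = 0xd8;            // 11 011 000b
 *      unsigned const uMod = (bRm >> 6) & 3;  // 3 -> register operand
 *      unsigned const uReg = (bRm >> 3) & 7;  // the /reg operand (or /digit)
 *      unsigned const uRm  =  bRm       & 7;  // the r/m operand
 *      printf("mod=%u reg=%u rm=%u\n", uMod, uReg, uRm); // mod=3 reg=3 rm=0
 *      return 0;
 *  }
 * @endcode
 */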


/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t,   u16Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t,   u32Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t,   u64Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
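

/*
 * The workers above don't know which instruction they're emulating; the
 * caller passes a size-indexed dispatch table.  A simplified sketch of the
 * shape of such a table, inferred from the pfnNormalUxx/pfnLockedUxx uses
 * above (the real IEMOPBINSIZES is declared elsewhere in IEM):
 *
 * @code
 *  #include <stdint.h>
 *
 *  typedef struct BINSIZESSKETCH
 *  {
 *      void (*pfnNormalU8)( uint8_t  *pu8Dst,  uint8_t  u8Src,  uint32_t *pfEFlags);
 *      void (*pfnNormalU16)(uint16_t *pu16Dst, uint16_t u16Src, uint32_t *pfEFlags);
 *      void (*pfnNormalU32)(uint32_t *pu32Dst, uint32_t u32Src, uint32_t *pfEFlags);
 *      void (*pfnNormalU64)(uint64_t *pu64Dst, uint64_t u64Src, uint32_t *pfEFlags);
 *      void (*pfnLockedU8)( uint8_t  *pu8Dst,  uint8_t  u8Src,  uint32_t *pfEFlags);
 *      void (*pfnLockedU16)(uint16_t *pu16Dst, uint16_t u16Src, uint32_t *pfEFlags);
 *      void (*pfnLockedU32)(uint32_t *pu32Dst, uint32_t u32Src, uint32_t *pfEFlags);
 *      void (*pfnLockedU64)(uint64_t *pu64Dst, uint64_t u64Src, uint32_t *pfEFlags);
 *  } BINSIZESSKETCH;
 * @endcode
 *
 * This is also why fAccess above keys off pfnLockedU8 being NULL: CMP and
 * TEST never write their destination, have no locked variants, and so map
 * the operand read-only.
 */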


/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1);
    IEM_MC_ARG(uint32_t *,      pEFlags,            2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
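

/*
 * In 64-bit mode there is no 64-bit immediate form of these instructions:
 * Iz stays at most 32 bits and IEM_OPCODE_GET_NEXT_S32_SX_U64 sign-extends
 * it to 64 bits, which boils down to a plain C conversion chain.  A minimal
 * sketch (the function name is illustrative only):
 *
 * @code
 *  #include <stdint.h>
 *
 *  static uint64_t SignExtendImm32(uint32_t u32Imm)
 *  {
 *      // 0x80000000 -> 0xffffffff80000000, 0x7fffffff stays 0x7fffffff.
 *      return (uint64_t)(int64_t)(int32_t)u32Imm;
 *  }
 * @endcode
 */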


/** Opcodes 0xf1, 0xd6. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC(Invalid, "Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Invalid with RM byte. */
FNIEMOPRM_DEF(iemOp_InvalidWithRM)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(InvalidWithRm, "InvalidWithRM");
    return IEMOP_RAISE_INVALID_OPCODE();
}



/** @name ..... opcodes.
 *
 * @{
 */

/** @} */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for opcode 0x0f 0x00 /4 (verr) and /5 (verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
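

/*
 * Group opcodes like 0x0f 0x00 use the ModR/M reg field as an opcode
 * extension (the "/digit" in the comments above).  A minimal sketch of the
 * dispatch just performed, with a hypothetical handler type:
 *
 * @code
 *  #include <stdint.h>
 *
 *  typedef int (*PFNGRPHANDLER)(uint8_t bRm);
 *
 *  static int DispatchGroup(uint8_t bRm, PFNGRPHANDLER const apfn[8])
 *  {
 *      return apfn[(bRm >> 3) & 7](bRm); // the reg field selects /0../7
 *  }
 * @endcode
 */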


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower four bits (PE, MP, EM, TS) are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
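

/*
 * A sketch of the CR0 update lmsw requests from iemCImpl_lmsw, assuming the
 * architected behaviour: only CR0[3:0] (PE, MP, EM, TS) are affected and PE
 * can be set but never cleared (bit masks spelled out instead of using the
 * X86_CR0_* defines):
 *
 * @code
 *  #include <stdint.h>
 *
 *  static uint64_t Cr0AfterLmsw(uint64_t uCr0, uint16_t u16NewMsw)
 *  {
 *      uint64_t const fMask = 0xf;                        // PE, MP, EM, TS
 *      uint64_t uNewCr0 = (uCr0 & ~fMask) | (u16NewMsw & fMask);
 *      uNewCr0 |= uCr0 & 1;                               // PE is sticky
 *      return uNewCr0;
 *  }
 * @endcode
 */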


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pVCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

/** Common worker for opcode 0x0f 0x02 (lar) and 0x0f 0x03 (lsl). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}


/** Opcode 0x0f 0x10. */
FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);


/** Opcode 0x0f 0x11. */
FNIEMOP_DEF(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd)
{
    /* Quick hack. Need to restructure all of this later some time. */
    uint32_t const fRelevantPrefix = pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ);
    if (fRelevantPrefix == 0)
    {
        IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /*
             * Register, register.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_BEGIN(0, 0);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
            IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                                  ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /*
             * Memory, register.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else if (fRelevantPrefix == IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /*
             * Register, register.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, uSrc);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
            IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /*
             * Memory, register.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        IEMOP_BITCH_ABOUT_STUB();
        return VERR_IEM_INSTR_NOT_IMPLEMENTED;
    }
    return VINF_SUCCESS;
}
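

/*
 * Opcodes in the 0x0f 0x10..0x17 range encode up to four instructions on
 * one byte, selected by the operand size (66h) and repeat (F3h/F2h)
 * prefixes.  A sketch of the selection done above for 0x0f 0x11 (the flag
 * values are illustrative, not the IEM_OP_PRF_* bit assignments):
 *
 * @code
 *  enum { PRF_NONE = 0, PRF_66 = 1, PRF_F3 = 2, PRF_F2 = 4 };
 *
 *  static const char *PickMnemonic0f11(unsigned fPrefixes)
 *  {
 *      switch (fPrefixes & (PRF_66 | PRF_F3 | PRF_F2))
 *      {
 *          case PRF_NONE: return "movups"; // implemented above
 *          case PRF_F2:   return "movsd";  // implemented above
 *          case PRF_66:   return "movupd"; // still stubbed above
 *          case PRF_F3:   return "movss";  // still stubbed above
 *          default:       return "invalid";
 *      }
 *  }
 * @endcode
 */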


/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq); //NEXT


/** Opcode 0x0f 0x13. */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq)
{
    /* Quick hack. Need to restructure all of this later some time. */
    if (pVCpu->iem.s.fPrefixes == IEM_OP_PRF_SIZE_OP)
    {
        IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
#if 0
            /*
             * Register, register.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
            IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
#else
            return IEMOP_RAISE_INVALID_OPCODE();
#endif
        }
        else
        {
            /*
             * Memory, register.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        return VINF_SUCCESS;
    }

    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/** Opcode 0x0f 0x14. */
FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
/** Opcode 0x0f 0x15. */
FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
/** Opcode 0x0f 0x16. */
FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x17. */
FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq); //NEXT


/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}
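

/*
 * The LOCK handling above implements the alternative CR8 encoding: on CPUs
 * reporting the feature (fMovCr8In32Bit, an AMD extension), F0 0F 20 C0 is
 * "mov eax, cr8" rather than a locked "mov eax, cr0".  A sketch of the
 * register-number fix-up, with a hypothetical helper:
 *
 * @code
 *  #include <stdbool.h>
 *  #include <stdint.h>
 *
 *  static int CrRegFromModRm(uint8_t bRm, bool fLockPrefix, bool fMovCr8In32Bit)
 *  {
 *      int iCrReg = (bRm >> 3) & 7;
 *      if (fLockPrefix)
 *      {
 *          if (!fMovCr8In32Bit)
 *              return -1;      // #UD, which takes precedence over #GP()
 *          iCrReg |= 8;        // LOCK promotes CR0..CR7 to CR8..CR15
 *      }
 *      return iCrReg;
 *  }
 * @endcode
 */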


/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}


/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
1810    /* mod is ignored, as are operand size overrides. */
1811 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
1812 IEMOP_HLP_MIN_386();
1813 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1814 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1815 else
1816 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1817
1818 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1819 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1820 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1821 {
1822 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1823 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1824 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1825 iCrReg |= 8;
1826 }
1827 switch (iCrReg)
1828 {
1829 case 0: case 2: case 3: case 4: case 8:
1830 break;
1831 default:
1832 return IEMOP_RAISE_INVALID_OPCODE();
1833 }
1834 IEMOP_HLP_DONE_DECODING();
1835
1836 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1837}
1838
1839
1840/** Opcode 0x0f 0x23. */
1841FNIEMOP_DEF(iemOp_mov_Dd_Rd)
1842{
1843 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
1844 IEMOP_HLP_MIN_386();
1845 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1847 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1848 return IEMOP_RAISE_INVALID_OPCODE();
1849 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
1850 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
1851 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1852}
1853
1854
1855/** Opcode 0x0f 0x24. */
1856FNIEMOP_DEF(iemOp_mov_Rd_Td)
1857{
1858 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
1859 /** @todo works on 386 and 486. */
1860 /* The RM byte is not considered, see testcase. */
1861 return IEMOP_RAISE_INVALID_OPCODE();
1862}
1863
1864
1865/** Opcode 0x0f 0x26. */
1866FNIEMOP_DEF(iemOp_mov_Td_Rd)
1867{
1868 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
1869 /** @todo works on 386 and 486. */
1870 /* The RM byte is not considered, see testcase. */
1871 return IEMOP_RAISE_INVALID_OPCODE();
1872}
1873
1874
1875/** Opcode 0x0f 0x28. */
1876FNIEMOP_DEF(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd)
1877{
1878 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1879        IEMOP_MNEMONIC(movaps_r_mr, "movaps Vps,Wps");
1880    else
1881        IEMOP_MNEMONIC(movapd_r_mr, "movapd Vpd,Wpd");
1882 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1883 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1884 {
1885 /*
1886 * Register, register.
1887 */
1888 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1889 IEM_MC_BEGIN(0, 0);
1890 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1891 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1892 else
1893 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1894 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1895 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1896 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1897 IEM_MC_ADVANCE_RIP();
1898 IEM_MC_END();
1899 }
1900 else
1901 {
1902 /*
1903 * Register, memory.
1904 */
1905 IEM_MC_BEGIN(0, 2);
1906 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1907 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1908
1909 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1910 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
1911 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1912 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1913 else
1914 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1915 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1916
1917 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1918 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1919
1920 IEM_MC_ADVANCE_RIP();
1921 IEM_MC_END();
1922 }
1923 return VINF_SUCCESS;
1924}
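
/**
 * A hedged reminder of what IEM_MC_FETCH_MEM_U128_ALIGN_SSE enforces above:
 * movaps/movapd fault on a misaligned operand, so the check reduces to the
 * low four address bits (illustrative predicate only, not the IEM code):
 * @code
 * #include <stdint.h>
 * static int IsSse16ByteMisaligned(uint64_t GCPtrEff)
 * {
 *     return (GCPtrEff & 15) != 0;  // misaligned => #GP(0) for aligned moves
 * }
 * @endcode
 */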
1925
1926
1927/** Opcode 0x0f 0x29. */
1928FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
1929{
1930 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1931 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
1932 else
1933 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
1934 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1935 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1936 {
1937 /*
1938 * Register, register.
1939 */
1940 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1941 IEM_MC_BEGIN(0, 0);
1942 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1943 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1944 else
1945 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1946 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1947 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1948 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1949 IEM_MC_ADVANCE_RIP();
1950 IEM_MC_END();
1951 }
1952 else
1953 {
1954 /*
1955 * Memory, register.
1956 */
1957 IEM_MC_BEGIN(0, 2);
1958 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1959 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1960
1961 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1962 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
1963 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1964 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1965 else
1966 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1967 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1968
1969 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1970 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1971
1972 IEM_MC_ADVANCE_RIP();
1973 IEM_MC_END();
1974 }
1975 return VINF_SUCCESS;
1976}
1977
1978
1979/** Opcode 0x0f 0x2a. */
1980FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey); //NEXT
1981
1982
1983/** Opcode 0x0f 0x2b. */
1984FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
1985{
1986 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1987 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
1988 else
1989        IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mpd,Vpd");
1990 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1991 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1992 {
1993 /*
1994 * memory, register.
1995 */
1996 IEM_MC_BEGIN(0, 2);
1997 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1998 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1999
2000 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2001 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
2002 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2003 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2004 else
2005 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2006        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2007
2008 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2009 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2010
2011 IEM_MC_ADVANCE_RIP();
2012 IEM_MC_END();
2013 }
2014 /* The register, register encoding is invalid. */
2015 else
2016 return IEMOP_RAISE_INVALID_OPCODE();
2017 return VINF_SUCCESS;
2018}
2019
2020
2021/** Opcode 0x0f 0x2c. */
2022FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd); //NEXT
2023/** Opcode 0x0f 0x2d. */
2024FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
2025/** Opcode 0x0f 0x2e. */
2026FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd); //NEXT
2027/** Opcode 0x0f 0x2f. */
2028FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
2029
2030
2031/** Opcode 0x0f 0x30. */
2032FNIEMOP_DEF(iemOp_wrmsr)
2033{
2034 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2035 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2036 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2037}
2038
2039
2040/** Opcode 0x0f 0x31. */
2041FNIEMOP_DEF(iemOp_rdtsc)
2042{
2043 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2045 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2046}
2047
2048
2049/** Opcode 0x0f 0x32. */
2050FNIEMOP_DEF(iemOp_rdmsr)
2051{
2052 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2054 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2055}
2056
2057
2058/** Opcode 0x0f 0x33. */
2059FNIEMOP_STUB(iemOp_rdpmc);
2060/** Opcode 0x0f 0x34. */
2061FNIEMOP_STUB(iemOp_sysenter);
2062/** Opcode 0x0f 0x35. */
2063FNIEMOP_STUB(iemOp_sysexit);
2064/** Opcode 0x0f 0x37. */
2065FNIEMOP_STUB(iemOp_getsec);
2066/** Opcode 0x0f 0x38. */
2067FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
2068/** Opcode 0x0f 0x3a. */
2069FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
2070
2071
2072/**
2073 * Implements a conditional move.
2074 *
2075 * Wish there was an obvious way to do this where we could share and reduce
2076 * code bloat.
2077 *
2078 * @param a_Cnd The conditional "microcode" operation.
2079 */
2080#define CMOV_X(a_Cnd) \
2081 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2082 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2083 { \
2084 switch (pVCpu->iem.s.enmEffOpSize) \
2085 { \
2086 case IEMMODE_16BIT: \
2087 IEM_MC_BEGIN(0, 1); \
2088 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2089 a_Cnd { \
2090 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2091 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2092 } IEM_MC_ENDIF(); \
2093 IEM_MC_ADVANCE_RIP(); \
2094 IEM_MC_END(); \
2095 return VINF_SUCCESS; \
2096 \
2097 case IEMMODE_32BIT: \
2098 IEM_MC_BEGIN(0, 1); \
2099 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2100 a_Cnd { \
2101 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2102 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2103 } IEM_MC_ELSE() { \
2104 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2105 } IEM_MC_ENDIF(); \
2106 IEM_MC_ADVANCE_RIP(); \
2107 IEM_MC_END(); \
2108 return VINF_SUCCESS; \
2109 \
2110 case IEMMODE_64BIT: \
2111 IEM_MC_BEGIN(0, 1); \
2112 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2113 a_Cnd { \
2114 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2115 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2116 } IEM_MC_ENDIF(); \
2117 IEM_MC_ADVANCE_RIP(); \
2118 IEM_MC_END(); \
2119 return VINF_SUCCESS; \
2120 \
2121 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2122 } \
2123 } \
2124 else \
2125 { \
2126 switch (pVCpu->iem.s.enmEffOpSize) \
2127 { \
2128 case IEMMODE_16BIT: \
2129 IEM_MC_BEGIN(0, 2); \
2130 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2131 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2133 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2134 a_Cnd { \
2135 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2136 } IEM_MC_ENDIF(); \
2137 IEM_MC_ADVANCE_RIP(); \
2138 IEM_MC_END(); \
2139 return VINF_SUCCESS; \
2140 \
2141 case IEMMODE_32BIT: \
2142 IEM_MC_BEGIN(0, 2); \
2143 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2144 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2145 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2146 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2147 a_Cnd { \
2148 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2149 } IEM_MC_ELSE() { \
2150 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2151 } IEM_MC_ENDIF(); \
2152 IEM_MC_ADVANCE_RIP(); \
2153 IEM_MC_END(); \
2154 return VINF_SUCCESS; \
2155 \
2156 case IEMMODE_64BIT: \
2157 IEM_MC_BEGIN(0, 2); \
2158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2159 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2161 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2162 a_Cnd { \
2163 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2164 } IEM_MC_ENDIF(); \
2165 IEM_MC_ADVANCE_RIP(); \
2166 IEM_MC_END(); \
2167 return VINF_SUCCESS; \
2168 \
2169 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2170 } \
2171 } do {} while (0)
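
/**
 * The IEM_MC_CLEAR_HIGH_GREG_U64 in the 32-bit ELSE branches above encodes a
 * subtle rule: in 64-bit mode a 32-bit cmov writes (and thus zero-extends)
 * the destination even when the condition is false.  A plain-C model of that
 * behaviour (editor's sketch, not IEM code):
 * @code
 * #include <stdint.h>
 * static uint64_t CMov32(uint64_t uDst, uint32_t uSrc, int fCond)
 * {
 *     // The upper 32 bits of the destination end up zero either way.
 *     return fCond ? (uint64_t)uSrc : (uint64_t)(uint32_t)uDst;
 * }
 * @endcode
 */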
2172
2173
2174
2175/** Opcode 0x0f 0x40. */
2176FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2177{
2178 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2179 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2180}
2181
2182
2183/** Opcode 0x0f 0x41. */
2184FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2185{
2186 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2187 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2188}
2189
2190
2191/** Opcode 0x0f 0x42. */
2192FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2193{
2194 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2195 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2196}
2197
2198
2199/** Opcode 0x0f 0x43. */
2200FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2201{
2202 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2203 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2204}
2205
2206
2207/** Opcode 0x0f 0x44. */
2208FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2209{
2210 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2211 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2212}
2213
2214
2215/** Opcode 0x0f 0x45. */
2216FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2217{
2218 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2219 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2220}
2221
2222
2223/** Opcode 0x0f 0x46. */
2224FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2225{
2226 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2227 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2228}
2229
2230
2231/** Opcode 0x0f 0x47. */
2232FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2233{
2234 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2235 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2236}
2237
2238
2239/** Opcode 0x0f 0x48. */
2240FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2241{
2242 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2243 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2244}
2245
2246
2247/** Opcode 0x0f 0x49. */
2248FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2249{
2250 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2251 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2252}
2253
2254
2255/** Opcode 0x0f 0x4a. */
2256FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2257{
2258 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2259 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2260}
2261
2262
2263/** Opcode 0x0f 0x4b. */
2264FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2265{
2266 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2267 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2268}
2269
2270
2271/** Opcode 0x0f 0x4c. */
2272FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2273{
2274 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2275 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2276}
2277
2278
2279/** Opcode 0x0f 0x4d. */
2280FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2281{
2282 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2283 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2284}
2285
2286
2287/** Opcode 0x0f 0x4e. */
2288FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2289{
2290 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2291 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2292}
2293
2294
2295/** Opcode 0x0f 0x4f. */
2296FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2297{
2298 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2299 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2300}
2301
2302#undef CMOV_X
2303
2304/** Opcode 0x0f 0x50. */
2305FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
2306/** Opcode 0x0f 0x51. */
2307FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
2308/** Opcode 0x0f 0x52. */
2309FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
2310/** Opcode 0x0f 0x53. */
2311FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
2312/** Opcode 0x0f 0x54. */
2313FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
2314/** Opcode 0x0f 0x55. */
2315FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
2316/** Opcode 0x0f 0x56. */
2317FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
2318/** Opcode 0x0f 0x57. */
2319FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
2320/** Opcode 0x0f 0x58. */
2321FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd); //NEXT
2322/** Opcode 0x0f 0x59. */
2323FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);//NEXT
2324/** Opcode 0x0f 0x5a. */
2325FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
2326/** Opcode 0x0f 0x5b. */
2327FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
2328/** Opcode 0x0f 0x5c. */
2329FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
2330/** Opcode 0x0f 0x5d. */
2331FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
2332/** Opcode 0x0f 0x5e. */
2333FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
2334/** Opcode 0x0f 0x5f. */
2335FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
2336
2337
2338/**
2339 * Common worker for SSE2 and MMX instructions on the forms:
2340 * pxxxx xmm1, xmm2/mem128
2341 * pxxxx mm1, mm2/mem32
2342 *
2343 * The 2nd operand is the first half of a register, which in the memory case
2344 * means a 32-bit memory access for MMX and a 128-bit aligned 64-bit or
2345 * 128-bit memory access for SSE.
2346 *
2347 * Exceptions type 4.
2348 */
2349FNIEMOP_DEF_1(iemOpCommonMmxSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2350{
2351 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2352 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2353 {
2354 case IEM_OP_PRF_SIZE_OP: /* SSE */
2355 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2356 {
2357 /*
2358 * Register, register.
2359 */
2360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2361 IEM_MC_BEGIN(2, 0);
2362 IEM_MC_ARG(uint128_t *, pDst, 0);
2363 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2364 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2365 IEM_MC_PREPARE_SSE_USAGE();
2366 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2367 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2368 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2369 IEM_MC_ADVANCE_RIP();
2370 IEM_MC_END();
2371 }
2372 else
2373 {
2374 /*
2375 * Register, memory.
2376 */
2377 IEM_MC_BEGIN(2, 2);
2378 IEM_MC_ARG(uint128_t *, pDst, 0);
2379 IEM_MC_LOCAL(uint64_t, uSrc);
2380 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2381 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2382
2383 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2385 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2386 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2387
2388 IEM_MC_PREPARE_SSE_USAGE();
2389 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2390 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2391
2392 IEM_MC_ADVANCE_RIP();
2393 IEM_MC_END();
2394 }
2395 return VINF_SUCCESS;
2396
2397 case 0: /* MMX */
2398 if (!pImpl->pfnU64)
2399 return IEMOP_RAISE_INVALID_OPCODE();
2400 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2401 {
2402 /*
2403 * Register, register.
2404 */
2405 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2406 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2408 IEM_MC_BEGIN(2, 0);
2409 IEM_MC_ARG(uint64_t *, pDst, 0);
2410 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2411 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2412 IEM_MC_PREPARE_FPU_USAGE();
2413 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2414 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2415 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2416 IEM_MC_ADVANCE_RIP();
2417 IEM_MC_END();
2418 }
2419 else
2420 {
2421 /*
2422 * Register, memory.
2423 */
2424 IEM_MC_BEGIN(2, 2);
2425 IEM_MC_ARG(uint64_t *, pDst, 0);
2426 IEM_MC_LOCAL(uint32_t, uSrc);
2427 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2428 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2429
2430 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2432 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2433 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2434
2435 IEM_MC_PREPARE_FPU_USAGE();
2436 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2437 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2438
2439 IEM_MC_ADVANCE_RIP();
2440 IEM_MC_END();
2441 }
2442 return VINF_SUCCESS;
2443
2444 default:
2445 return IEMOP_RAISE_INVALID_OPCODE();
2446 }
2447}
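
/**
 * For reference, a plain-C model of the low-half interleave shape the worker
 * above dispatches, using the 64-bit MMX punpcklbw as the example (editor's
 * sketch; the real work lives in the g_iemAImpl_* assembly helpers):
 * @code
 * #include <stdint.h>
 * static uint64_t PunpcklbwU64(uint64_t uDst, uint64_t uSrc)
 * {
 *     uint64_t uResult = 0;
 *     for (unsigned i = 0; i < 4; i++)   // interleave the four low bytes
 *     {
 *         uResult |= ((uDst >> (i * 8)) & 0xff) << (i * 16);      // even bytes
 *         uResult |= ((uSrc >> (i * 8)) & 0xff) << (i * 16 + 8);  // odd bytes
 *     }
 *     return uResult;
 * }
 * @endcode
 */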
2448
2449
2450/** Opcode 0x0f 0x60. */
2451FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq)
2452{
2453 IEMOP_MNEMONIC(punpcklbw, "punpcklbw");
2454 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2455}
2456
2457
2458/** Opcode 0x0f 0x61. */
2459FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq)
2460{
2461 IEMOP_MNEMONIC(punpcklwd, "punpcklwd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
2462 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2463}
2464
2465
2466/** Opcode 0x0f 0x62. */
2467FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq)
2468{
2469 IEMOP_MNEMONIC(punpckldq, "punpckldq");
2470 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2471}
2472
2473
2474/** Opcode 0x0f 0x63. */
2475FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
2476/** Opcode 0x0f 0x64. */
2477FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
2478/** Opcode 0x0f 0x65. */
2479FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
2480/** Opcode 0x0f 0x66. */
2481FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
2482/** Opcode 0x0f 0x67. */
2483FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
2484
2485
2486/**
2487 * Common worker for SSE2 and MMX instructions on the forms:
2488 * pxxxx xmm1, xmm2/mem128
2489 * pxxxx mm1, mm2/mem64
2490 *
2491 * The 2nd operand is the second half of a register, which in the memory case
2492 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2493 * where it may read the full 128 bits or only the upper 64 bits.
2494 *
2495 * Exceptions type 4.
2496 */
2497FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2498{
2499 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2500 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2501 {
2502 case IEM_OP_PRF_SIZE_OP: /* SSE */
2503 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2504 {
2505 /*
2506 * Register, register.
2507 */
2508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2509 IEM_MC_BEGIN(2, 0);
2510 IEM_MC_ARG(uint128_t *, pDst, 0);
2511 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2512 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2513 IEM_MC_PREPARE_SSE_USAGE();
2514 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2515 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2516 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2517 IEM_MC_ADVANCE_RIP();
2518 IEM_MC_END();
2519 }
2520 else
2521 {
2522 /*
2523 * Register, memory.
2524 */
2525 IEM_MC_BEGIN(2, 2);
2526 IEM_MC_ARG(uint128_t *, pDst, 0);
2527 IEM_MC_LOCAL(uint128_t, uSrc);
2528 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2529 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2530
2531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2533 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2534            IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2535
2536 IEM_MC_PREPARE_SSE_USAGE();
2537 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2538 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2539
2540 IEM_MC_ADVANCE_RIP();
2541 IEM_MC_END();
2542 }
2543 return VINF_SUCCESS;
2544
2545 case 0: /* MMX */
2546 if (!pImpl->pfnU64)
2547 return IEMOP_RAISE_INVALID_OPCODE();
2548 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2549 {
2550 /*
2551 * Register, register.
2552 */
2553 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2554 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2556 IEM_MC_BEGIN(2, 0);
2557 IEM_MC_ARG(uint64_t *, pDst, 0);
2558 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2559 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2560 IEM_MC_PREPARE_FPU_USAGE();
2561 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2562 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2563 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2564 IEM_MC_ADVANCE_RIP();
2565 IEM_MC_END();
2566 }
2567 else
2568 {
2569 /*
2570 * Register, memory.
2571 */
2572 IEM_MC_BEGIN(2, 2);
2573 IEM_MC_ARG(uint64_t *, pDst, 0);
2574 IEM_MC_LOCAL(uint64_t, uSrc);
2575 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2576 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2577
2578 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2580 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2581 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2582
2583 IEM_MC_PREPARE_FPU_USAGE();
2584 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2585 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2586
2587 IEM_MC_ADVANCE_RIP();
2588 IEM_MC_END();
2589 }
2590 return VINF_SUCCESS;
2591
2592 default:
2593 return IEMOP_RAISE_INVALID_OPCODE();
2594 }
2595}
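
/**
 * And the matching sketch for the high-half shape above, as the 64-bit
 * punpckhdq: the result combines the two high dwords (illustrative only,
 * not the g_iemAImpl_* implementation):
 * @code
 * #include <stdint.h>
 * static uint64_t PunpckhdqU64(uint64_t uDst, uint64_t uSrc)
 * {
 *     return (uDst >> 32) | (uSrc & UINT64_C(0xffffffff00000000));
 * }
 * @endcode
 */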
2596
2597
2598/** Opcode 0x0f 0x68. */
2599FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
2600{
2601 IEMOP_MNEMONIC(punpckhbw, "punpckhbw");
2602 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2603}
2604
2605
2606/** Opcode 0x0f 0x69. */
2607FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
2608{
2609 IEMOP_MNEMONIC(punpckhwd, "punpckhwd");
2610 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2611}
2612
2613
2614/** Opcode 0x0f 0x6a. */
2615FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
2616{
2617 IEMOP_MNEMONIC(punpckhdq, "punpckhdq");
2618 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2619}
2620
2621/** Opcode 0x0f 0x6b. */
2622FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);
2623
2624
2625/** Opcode 0x0f 0x6c. */
2626FNIEMOP_DEF(iemOp_punpcklqdq_Vdq_Wdq)
2627{
2628 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq");
2629 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2630}
2631
2632
2633/** Opcode 0x0f 0x6d. */
2634FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
2635{
2636 IEMOP_MNEMONIC(punpckhqdq, "punpckhqdq");
2637 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
2638}
2639
2640
2641/** Opcode 0x0f 0x6e. */
2642FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
2643{
2644 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2645 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2646 {
2647 case IEM_OP_PRF_SIZE_OP: /* SSE */
2648 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2649 IEMOP_MNEMONIC(movdq_Wq_Eq, "movq Wq,Eq");
2650 else
2651 IEMOP_MNEMONIC(movdq_Wd_Ed, "movd Wd,Ed");
2652 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2653 {
2654 /* XMM, greg*/
2655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2656 IEM_MC_BEGIN(0, 1);
2657 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2658 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2659 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2660 {
2661 IEM_MC_LOCAL(uint64_t, u64Tmp);
2662 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2663 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2664 }
2665 else
2666 {
2667 IEM_MC_LOCAL(uint32_t, u32Tmp);
2668 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2669 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2670 }
2671 IEM_MC_ADVANCE_RIP();
2672 IEM_MC_END();
2673 }
2674 else
2675 {
2676 /* XMM, [mem] */
2677 IEM_MC_BEGIN(0, 2);
2678 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2679 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
2680                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2682 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2683 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2684 {
2685 IEM_MC_LOCAL(uint64_t, u64Tmp);
2686 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2687 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2688 }
2689 else
2690 {
2691 IEM_MC_LOCAL(uint32_t, u32Tmp);
2692 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2693 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2694 }
2695 IEM_MC_ADVANCE_RIP();
2696 IEM_MC_END();
2697 }
2698 return VINF_SUCCESS;
2699
2700 case 0: /* MMX */
2701 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2702 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
2703 else
2704 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
2705 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2706 {
2707 /* MMX, greg */
2708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2709 IEM_MC_BEGIN(0, 1);
2710 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2711 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2712 IEM_MC_LOCAL(uint64_t, u64Tmp);
2713 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2714 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2715 else
2716 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2717 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2718 IEM_MC_ADVANCE_RIP();
2719 IEM_MC_END();
2720 }
2721 else
2722 {
2723 /* MMX, [mem] */
2724 IEM_MC_BEGIN(0, 2);
2725 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2726 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2727                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2729 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2730 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2731 {
2732 IEM_MC_LOCAL(uint64_t, u64Tmp);
2733 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2734 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2735 }
2736 else
2737 {
2738 IEM_MC_LOCAL(uint32_t, u32Tmp);
2739 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2740 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
2741 }
2742 IEM_MC_ADVANCE_RIP();
2743 IEM_MC_END();
2744 }
2745 return VINF_SUCCESS;
2746
2747 default:
2748 return IEMOP_RAISE_INVALID_OPCODE();
2749 }
2750}
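
/**
 * What IEM_MC_STORE_XREG_U32_ZX_U128 above does, spelled out: a 32-bit movd
 * into an XMM register zero-extends through all 128 bits.  Editor's sketch
 * with the register modelled as two little-endian qwords:
 * @code
 * #include <stdint.h>
 * static void MovdToXmm(uint64_t pauXmm[2], uint32_t u32Src)
 * {
 *     pauXmm[0] = u32Src;  // low 32 bits set, bits 32..63 cleared
 *     pauXmm[1] = 0;       // high qword cleared as well
 * }
 * @endcode
 */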
2751
2752
2753/** Opcode 0x0f 0x6f. */
2754FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
2755{
2756 bool fAligned = false;
2757 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2758 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2759 {
2760 case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
2761            fAligned = true; /* fall thru */
2762 case IEM_OP_PRF_REPZ: /* SSE unaligned */
2763 if (fAligned)
2764 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
2765 else
2766 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
2767 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2768 {
2769 /*
2770 * Register, register.
2771 */
2772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2773 IEM_MC_BEGIN(0, 0);
2774 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2775 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2776 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2777 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2778 IEM_MC_ADVANCE_RIP();
2779 IEM_MC_END();
2780 }
2781 else
2782 {
2783 /*
2784 * Register, memory.
2785 */
2786 IEM_MC_BEGIN(0, 2);
2787 IEM_MC_LOCAL(uint128_t, u128Tmp);
2788 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2789
2790 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2791 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2792 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2793 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2794 if (fAligned)
2795 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2796 else
2797 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2798 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2799
2800 IEM_MC_ADVANCE_RIP();
2801 IEM_MC_END();
2802 }
2803 return VINF_SUCCESS;
2804
2805 case 0: /* MMX */
2806 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
2807 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2808 {
2809 /*
2810 * Register, register.
2811 */
2812 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2813 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2815 IEM_MC_BEGIN(0, 1);
2816 IEM_MC_LOCAL(uint64_t, u64Tmp);
2817 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2818 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2819 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
2820 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2821 IEM_MC_ADVANCE_RIP();
2822 IEM_MC_END();
2823 }
2824 else
2825 {
2826 /*
2827 * Register, memory.
2828 */
2829 IEM_MC_BEGIN(0, 2);
2830 IEM_MC_LOCAL(uint64_t, u64Tmp);
2831 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2832
2833 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2835 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2836 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2837 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2838 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2839
2840 IEM_MC_ADVANCE_RIP();
2841 IEM_MC_END();
2842 }
2843 return VINF_SUCCESS;
2844
2845 default:
2846 return IEMOP_RAISE_INVALID_OPCODE();
2847 }
2848}
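
/**
 * The prefix dispatch above, summarized (editor's sketch, hypothetical
 * helper): the operand-size prefix selects the aligned SSE form, REPZ the
 * unaligned one, and no prefix the MMX move.
 * @code
 * static const char *MovDqForm(int fPrefix66, int fPrefixF3)
 * {
 *     if (fPrefix66) return "movdqa Vdq,Wdq";  // 66 0F 6F, 16-byte aligned
 *     if (fPrefixF3) return "movdqu Vdq,Wdq";  // F3 0F 6F, no alignment check
 *     return "movq Pq,Qq";                     // 0F 6F, MMX
 * }
 * @endcode
 */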
2849
2850
2851/** Opcode 0x0f 0x70. The immediate here is evil! */
2852FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
2853{
2854 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2855 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2856 {
2857 case IEM_OP_PRF_SIZE_OP: /* SSE */
2858 case IEM_OP_PRF_REPNZ: /* SSE */
2859 case IEM_OP_PRF_REPZ: /* SSE */
2860 {
2861 PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
2862 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2863 {
2864 case IEM_OP_PRF_SIZE_OP:
2865 IEMOP_MNEMONIC(pshufd_Vdq_Wdq, "pshufd Vdq,Wdq,Ib");
2866 pfnAImpl = iemAImpl_pshufd;
2867 break;
2868 case IEM_OP_PRF_REPNZ:
2869 IEMOP_MNEMONIC(pshuflw_Vdq_Wdq, "pshuflw Vdq,Wdq,Ib");
2870 pfnAImpl = iemAImpl_pshuflw;
2871 break;
2872 case IEM_OP_PRF_REPZ:
2873 IEMOP_MNEMONIC(pshufhw_Vdq_Wdq, "pshufhw Vdq,Wdq,Ib");
2874 pfnAImpl = iemAImpl_pshufhw;
2875 break;
2876 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2877 }
2878 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2879 {
2880 /*
2881 * Register, register.
2882 */
2883 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2885
2886 IEM_MC_BEGIN(3, 0);
2887 IEM_MC_ARG(uint128_t *, pDst, 0);
2888 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2889 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2890 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2891 IEM_MC_PREPARE_SSE_USAGE();
2892 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2893 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2894 IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
2895 IEM_MC_ADVANCE_RIP();
2896 IEM_MC_END();
2897 }
2898 else
2899 {
2900 /*
2901 * Register, memory.
2902 */
2903 IEM_MC_BEGIN(3, 2);
2904 IEM_MC_ARG(uint128_t *, pDst, 0);
2905 IEM_MC_LOCAL(uint128_t, uSrc);
2906 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2907 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2908
2909                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2910 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2911 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2913 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2914
2915 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2916 IEM_MC_PREPARE_SSE_USAGE();
2917 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2918 IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
2919
2920 IEM_MC_ADVANCE_RIP();
2921 IEM_MC_END();
2922 }
2923 return VINF_SUCCESS;
2924 }
2925
2926 case 0: /* MMX Extension */
2927 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
2928 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2929 {
2930 /*
2931 * Register, register.
2932 */
2933 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2935
2936 IEM_MC_BEGIN(3, 0);
2937 IEM_MC_ARG(uint64_t *, pDst, 0);
2938 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2939 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2940 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2941 IEM_MC_PREPARE_FPU_USAGE();
2942 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2943 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2944 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2945 IEM_MC_ADVANCE_RIP();
2946 IEM_MC_END();
2947 }
2948 else
2949 {
2950 /*
2951 * Register, memory.
2952 */
2953 IEM_MC_BEGIN(3, 2);
2954 IEM_MC_ARG(uint64_t *, pDst, 0);
2955 IEM_MC_LOCAL(uint64_t, uSrc);
2956 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2957 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2958
2959                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2960 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2961 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2963 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2964
2965 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2966 IEM_MC_PREPARE_FPU_USAGE();
2967 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2968 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2969
2970 IEM_MC_ADVANCE_RIP();
2971 IEM_MC_END();
2972 }
2973 return VINF_SUCCESS;
2974
2975 default:
2976 return IEMOP_RAISE_INVALID_OPCODE();
2977 }
2978}
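
/**
 * What the "evil" immediate encodes: four 2-bit source-lane selectors.  A
 * compact model of pshufw (the SSE forms differ only in lane width and
 * count); editor's sketch, not the real iemAImpl_pshufw:
 * @code
 * #include <stdint.h>
 * static uint64_t PshufwU64(uint64_t uSrc, uint8_t bImm)
 * {
 *     uint64_t uResult = 0;
 *     for (unsigned i = 0; i < 4; i++)
 *     {
 *         unsigned iSel = (bImm >> (i * 2)) & 3;   // source word index
 *         uResult |= ((uSrc >> (iSel * 16)) & 0xffff) << (i * 16);
 *     }
 *     return uResult;
 * }
 * @endcode
 */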
2979
2980
2981/** Opcode 0x0f 0x71 11/2. */
2982FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
2983
2984/** Opcode 0x66 0x0f 0x71 11/2. */
2985FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);
2986
2987/** Opcode 0x0f 0x71 11/4. */
2988FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
2989
2990/** Opcode 0x66 0x0f 0x71 11/4. */
2991FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);
2992
2993/** Opcode 0x0f 0x71 11/6. */
2994FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
2995
2996/** Opcode 0x66 0x0f 0x71 11/6. */
2997FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
2998
2999
3000/** Opcode 0x0f 0x71. */
3001FNIEMOP_DEF(iemOp_Grp12)
3002{
3003 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3004 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3005 return IEMOP_RAISE_INVALID_OPCODE();
3006 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3007 {
3008 case 0: case 1: case 3: case 5: case 7:
3009 return IEMOP_RAISE_INVALID_OPCODE();
3010 case 2:
3011 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3012 {
3013 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
3014 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
3015 default: return IEMOP_RAISE_INVALID_OPCODE();
3016 }
3017 case 4:
3018 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3019 {
3020 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
3021 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
3022 default: return IEMOP_RAISE_INVALID_OPCODE();
3023 }
3024 case 6:
3025 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3026 {
3027 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
3028 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
3029 default: return IEMOP_RAISE_INVALID_OPCODE();
3030 }
3031 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3032 }
3033}
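
/**
 * The per-lane semantics behind the /2 (psrlw) leaf above, as a plain-C
 * sketch of the 64-bit form; groups 13 and 14 below follow the same decode
 * pattern with dword/qword lanes (illustrative only):
 * @code
 * #include <stdint.h>
 * static uint64_t PsrlwU64(uint64_t uSrc, uint8_t cShift)
 * {
 *     if (cShift > 15)
 *         return 0;                      // counts >= 16 clear every word lane
 *     uint64_t uResult = 0;
 *     for (unsigned i = 0; i < 4; i++)
 *         uResult |= (((uSrc >> (i * 16)) & 0xffff) >> cShift) << (i * 16);
 *     return uResult;
 * }
 * @endcode
 */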
3034
3035
3036/** Opcode 0x0f 0x72 11/2. */
3037FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3038
3039/** Opcode 0x66 0x0f 0x72 11/2. */
3040FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);
3041
3042/** Opcode 0x0f 0x72 11/4. */
3043FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3044
3045/** Opcode 0x66 0x0f 0x72 11/4. */
3046FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);
3047
3048/** Opcode 0x0f 0x72 11/6. */
3049FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3050
3051/** Opcode 0x66 0x0f 0x72 11/6. */
3052FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
3053
3054
3055/** Opcode 0x0f 0x72. */
3056FNIEMOP_DEF(iemOp_Grp13)
3057{
3058 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3059 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3060 return IEMOP_RAISE_INVALID_OPCODE();
3061 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3062 {
3063 case 0: case 1: case 3: case 5: case 7:
3064 return IEMOP_RAISE_INVALID_OPCODE();
3065 case 2:
3066 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3067 {
3068 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
3069 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
3070 default: return IEMOP_RAISE_INVALID_OPCODE();
3071 }
3072 case 4:
3073 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3074 {
3075 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
3076 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
3077 default: return IEMOP_RAISE_INVALID_OPCODE();
3078 }
3079 case 6:
3080 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3081 {
3082 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
3083 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
3084 default: return IEMOP_RAISE_INVALID_OPCODE();
3085 }
3086 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3087 }
3088}
3089
3090
3091/** Opcode 0x0f 0x73 11/2. */
3092FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3093
3094/** Opcode 0x66 0x0f 0x73 11/2. */
3095FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);
3096
3097/** Opcode 0x66 0x0f 0x73 11/3. */
3098FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT
3099
3100/** Opcode 0x0f 0x73 11/6. */
3101FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3102
3103/** Opcode 0x66 0x0f 0x73 11/6. */
3104FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);
3105
3106/** Opcode 0x66 0x0f 0x73 11/7. */
3107FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
3108
3109
3110/** Opcode 0x0f 0x73. */
3111FNIEMOP_DEF(iemOp_Grp14)
3112{
3113 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3114 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3115 return IEMOP_RAISE_INVALID_OPCODE();
3116 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3117 {
3118 case 0: case 1: case 4: case 5:
3119 return IEMOP_RAISE_INVALID_OPCODE();
3120 case 2:
3121 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3122 {
3123 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
3124 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
3125 default: return IEMOP_RAISE_INVALID_OPCODE();
3126 }
3127 case 3:
3128 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3129 {
3130 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
3131 default: return IEMOP_RAISE_INVALID_OPCODE();
3132 }
3133 case 6:
3134 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3135 {
3136 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
3137 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
3138 default: return IEMOP_RAISE_INVALID_OPCODE();
3139 }
3140 case 7:
3141 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3142 {
3143 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
3144 default: return IEMOP_RAISE_INVALID_OPCODE();
3145 }
3146 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3147 }
3148}
3149
3150
3151/**
3152 * Common worker for SSE2 and MMX instructions on the forms:
3153 * pxxx mm1, mm2/mem64
3154 * pxxx xmm1, xmm2/mem128
3155 *
3156 * Proper alignment of the 128-bit operand is enforced.
3157 * Exceptions type 4. SSE2 and MMX cpuid checks.
3158 */
3159FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3160{
3161 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3162 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3163 {
3164 case IEM_OP_PRF_SIZE_OP: /* SSE */
3165 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3166 {
3167 /*
3168 * Register, register.
3169 */
3170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3171 IEM_MC_BEGIN(2, 0);
3172 IEM_MC_ARG(uint128_t *, pDst, 0);
3173 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3174 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3175 IEM_MC_PREPARE_SSE_USAGE();
3176 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3177 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3178 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3179 IEM_MC_ADVANCE_RIP();
3180 IEM_MC_END();
3181 }
3182 else
3183 {
3184 /*
3185 * Register, memory.
3186 */
3187 IEM_MC_BEGIN(2, 2);
3188 IEM_MC_ARG(uint128_t *, pDst, 0);
3189 IEM_MC_LOCAL(uint128_t, uSrc);
3190 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3192
3193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3195 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3196 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3197
3198 IEM_MC_PREPARE_SSE_USAGE();
3199 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3200 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3201
3202 IEM_MC_ADVANCE_RIP();
3203 IEM_MC_END();
3204 }
3205 return VINF_SUCCESS;
3206
3207 case 0: /* MMX */
3208 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3209 {
3210 /*
3211 * Register, register.
3212 */
3213 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3214 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3216 IEM_MC_BEGIN(2, 0);
3217 IEM_MC_ARG(uint64_t *, pDst, 0);
3218 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3219 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3220 IEM_MC_PREPARE_FPU_USAGE();
3221 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3222 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3223 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3224 IEM_MC_ADVANCE_RIP();
3225 IEM_MC_END();
3226 }
3227 else
3228 {
3229 /*
3230 * Register, memory.
3231 */
3232 IEM_MC_BEGIN(2, 2);
3233 IEM_MC_ARG(uint64_t *, pDst, 0);
3234 IEM_MC_LOCAL(uint64_t, uSrc);
3235 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3236 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3237
3238 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3240 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3241 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3242
3243 IEM_MC_PREPARE_FPU_USAGE();
3244 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3245 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3246
3247 IEM_MC_ADVANCE_RIP();
3248 IEM_MC_END();
3249 }
3250 return VINF_SUCCESS;
3251
3252 default:
3253 return IEMOP_RAISE_INVALID_OPCODE();
3254 }
3255}
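
/**
 * The full-width compare shape above, modelled in plain C via the 64-bit
 * pcmpeqb used by the next three opcodes (editor's sketch; the dispatcher
 * calls the g_iemAImpl_* helpers instead):
 * @code
 * #include <stdint.h>
 * static uint64_t PcmpeqbU64(uint64_t uDst, uint64_t uSrc)
 * {
 *     uint64_t uResult = 0;
 *     for (unsigned i = 0; i < 8; i++)   // 0xff where bytes match, else 0
 *         if (((uDst >> (i * 8)) & 0xff) == ((uSrc >> (i * 8)) & 0xff))
 *             uResult |= UINT64_C(0xff) << (i * 8);
 *     return uResult;
 * }
 * @endcode
 */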
3256
3257
3258/** Opcode 0x0f 0x74. */
3259FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
3260{
3261 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3262 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3263}
3264
3265
3266/** Opcode 0x0f 0x75. */
3267FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
3268{
3269 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3270 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3271}
3272
3273
3274/** Opcode 0x0f 0x76. */
3275FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
3276{
3277 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3278 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3279}
3280
3281
3282/** Opcode 0x0f 0x77. */
3283FNIEMOP_STUB(iemOp_emms);
3284/** Opcode 0x0f 0x78. */
3285FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17);
3286/** Opcode 0x0f 0x79. */
3287FNIEMOP_UD_STUB(iemOp_vmwrite);
3288/** Opcode 0x0f 0x7c. */
3289FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);
3290/** Opcode 0x0f 0x7d. */
3291FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
3292
3293
3294/** Opcode 0x0f 0x7e. */
3295FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
3296{
3297 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3298 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3299 {
3300 case IEM_OP_PRF_SIZE_OP: /* SSE */
3301 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3302 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
3303 else
3304 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
3305 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3306 {
3307 /* greg, XMM */
3308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3309 IEM_MC_BEGIN(0, 1);
3310 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3311 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3312 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3313 {
3314 IEM_MC_LOCAL(uint64_t, u64Tmp);
3315 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3316 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3317 }
3318 else
3319 {
3320 IEM_MC_LOCAL(uint32_t, u32Tmp);
3321 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3322 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3323 }
3324 IEM_MC_ADVANCE_RIP();
3325 IEM_MC_END();
3326 }
3327 else
3328 {
3329 /* [mem], XMM */
3330 IEM_MC_BEGIN(0, 2);
3331 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3332 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3333                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3335 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3336 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3337 {
3338 IEM_MC_LOCAL(uint64_t, u64Tmp);
3339 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3340 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3341 }
3342 else
3343 {
3344 IEM_MC_LOCAL(uint32_t, u32Tmp);
3345 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3346 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3347 }
3348 IEM_MC_ADVANCE_RIP();
3349 IEM_MC_END();
3350 }
3351 return VINF_SUCCESS;
3352
3353 case 0: /* MMX */
3354 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3355 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3356 else
3357 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3358 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3359 {
3360 /* greg, MMX */
3361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3362 IEM_MC_BEGIN(0, 1);
3363 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3364 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3365 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3366 {
3367 IEM_MC_LOCAL(uint64_t, u64Tmp);
3368 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3369 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3370 }
3371 else
3372 {
3373 IEM_MC_LOCAL(uint32_t, u32Tmp);
3374 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3375 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3376 }
3377 IEM_MC_ADVANCE_RIP();
3378 IEM_MC_END();
3379 }
3380 else
3381 {
3382 /* [mem], MMX */
3383 IEM_MC_BEGIN(0, 2);
3384 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3385 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3386                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3388 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3389 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3390 {
3391 IEM_MC_LOCAL(uint64_t, u64Tmp);
3392 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3393 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3394 }
3395 else
3396 {
3397 IEM_MC_LOCAL(uint32_t, u32Tmp);
3398 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3399 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3400 }
3401 IEM_MC_ADVANCE_RIP();
3402 IEM_MC_END();
3403 }
3404 return VINF_SUCCESS;
3405
3406 default:
3407 return IEMOP_RAISE_INVALID_OPCODE();
3408 }
3409}
3410
3411
3412/** Opcode 0x0f 0x7f. */
3413FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
3414{
3415 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3416 bool fAligned = false;
3417 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3418 {
3419 case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
3420            fAligned = true; /* fall thru */
3421 case IEM_OP_PRF_REPZ: /* SSE unaligned */
3422 if (fAligned)
3423 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wdq,Vdq");
3424 else
3425 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wdq,Vdq");
3426 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3427 {
3428 /*
3429 * Register, register.
3430 */
3431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3432 IEM_MC_BEGIN(0, 0);
3433 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3434 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3435 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3436 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3437 IEM_MC_ADVANCE_RIP();
3438 IEM_MC_END();
3439 }
3440 else
3441 {
3442 /*
3443 * Register, memory.
3444 */
3445 IEM_MC_BEGIN(0, 2);
3446 IEM_MC_LOCAL(uint128_t, u128Tmp);
3447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3448
3449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3451 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3452 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3453
3454 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3455 if (fAligned)
3456 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3457 else
3458 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3459
3460 IEM_MC_ADVANCE_RIP();
3461 IEM_MC_END();
3462 }
3463 return VINF_SUCCESS;
3464
3465 case 0: /* MMX */
3466 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3467
3468 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3469 {
3470 /*
3471 * Register, register.
3472 */
3473 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3474 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3476 IEM_MC_BEGIN(0, 1);
3477 IEM_MC_LOCAL(uint64_t, u64Tmp);
3478 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3479 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3480 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3481 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3482 IEM_MC_ADVANCE_RIP();
3483 IEM_MC_END();
3484 }
3485 else
3486 {
3487 /*
3488 * Register, memory.
3489 */
3490 IEM_MC_BEGIN(0, 2);
3491 IEM_MC_LOCAL(uint64_t, u64Tmp);
3492 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3493
3494 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3496 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3497 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3498
3499 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3500 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3501
3502 IEM_MC_ADVANCE_RIP();
3503 IEM_MC_END();
3504 }
3505 return VINF_SUCCESS;
3506
3507 default:
3508 return IEMOP_RAISE_INVALID_OPCODE();
3509 }
3510}
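
/* Note on the two SSE stores above: IEM_MC_STORE_MEM_U128_ALIGN_SSE models
 * movdqa's requirement that the effective address be 16-byte aligned, while
 * the plain U128 store (movdqu) accepts any address. A minimal sketch of the
 * check the _ALIGN_SSE variant implies (illustrative only, not a quote of
 * that statement's implementation):
 */
#if 0
if ((GCPtrEffSrc & 15) != 0)
    return iemRaiseGeneralProtectionFault0(pVCpu); /* #GP(0) on misalignment */
#endif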
3511
3512
3513
3514/** Opcode 0x0f 0x80. */
3515FNIEMOP_DEF(iemOp_jo_Jv)
3516{
3517 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
3518 IEMOP_HLP_MIN_386();
3519 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3520 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3521 {
3522 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3524
3525 IEM_MC_BEGIN(0, 0);
3526 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3527 IEM_MC_REL_JMP_S16(i16Imm);
3528 } IEM_MC_ELSE() {
3529 IEM_MC_ADVANCE_RIP();
3530 } IEM_MC_ENDIF();
3531 IEM_MC_END();
3532 }
3533 else
3534 {
3535 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3537
3538 IEM_MC_BEGIN(0, 0);
3539 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3540 IEM_MC_REL_JMP_S32(i32Imm);
3541 } IEM_MC_ELSE() {
3542 IEM_MC_ADVANCE_RIP();
3543 } IEM_MC_ENDIF();
3544 IEM_MC_END();
3545 }
3546 return VINF_SUCCESS;
3547}
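
/* The 15 Jcc handlers that follow all share this shape: decode a signed 16-bit
 * or 32-bit displacement per the effective operand size, then either take the
 * relative jump or just advance RIP. A rough sketch of what the REL_JMP
 * statements amount to, with uRipAfterInstr/uNewRip as stand-in names and
 * canonical-address checks ignored (illustrative only):
 */
#if 0
if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    uNewRip = (uint16_t)(uRipAfterInstr + i16Imm);  /* IP wraps at 64K */
else
    uNewRip = uRipAfterInstr + i32Imm;              /* sign-extended in 64-bit mode */
#endif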
3548
3549
3550/** Opcode 0x0f 0x81. */
3551FNIEMOP_DEF(iemOp_jno_Jv)
3552{
3553 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
3554 IEMOP_HLP_MIN_386();
3555 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3556 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3557 {
3558 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3560
3561 IEM_MC_BEGIN(0, 0);
3562 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3563 IEM_MC_ADVANCE_RIP();
3564 } IEM_MC_ELSE() {
3565 IEM_MC_REL_JMP_S16(i16Imm);
3566 } IEM_MC_ENDIF();
3567 IEM_MC_END();
3568 }
3569 else
3570 {
3571 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3573
3574 IEM_MC_BEGIN(0, 0);
3575 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3576 IEM_MC_ADVANCE_RIP();
3577 } IEM_MC_ELSE() {
3578 IEM_MC_REL_JMP_S32(i32Imm);
3579 } IEM_MC_ENDIF();
3580 IEM_MC_END();
3581 }
3582 return VINF_SUCCESS;
3583}
3584
3585
3586/** Opcode 0x0f 0x82. */
3587FNIEMOP_DEF(iemOp_jc_Jv)
3588{
3589 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
3590 IEMOP_HLP_MIN_386();
3591 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3592 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3593 {
3594 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3596
3597 IEM_MC_BEGIN(0, 0);
3598 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3599 IEM_MC_REL_JMP_S16(i16Imm);
3600 } IEM_MC_ELSE() {
3601 IEM_MC_ADVANCE_RIP();
3602 } IEM_MC_ENDIF();
3603 IEM_MC_END();
3604 }
3605 else
3606 {
3607 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3609
3610 IEM_MC_BEGIN(0, 0);
3611 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3612 IEM_MC_REL_JMP_S32(i32Imm);
3613 } IEM_MC_ELSE() {
3614 IEM_MC_ADVANCE_RIP();
3615 } IEM_MC_ENDIF();
3616 IEM_MC_END();
3617 }
3618 return VINF_SUCCESS;
3619}
3620
3621
3622/** Opcode 0x0f 0x83. */
3623FNIEMOP_DEF(iemOp_jnc_Jv)
3624{
3625 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
3626 IEMOP_HLP_MIN_386();
3627 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3628 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3629 {
3630 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3632
3633 IEM_MC_BEGIN(0, 0);
3634 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3635 IEM_MC_ADVANCE_RIP();
3636 } IEM_MC_ELSE() {
3637 IEM_MC_REL_JMP_S16(i16Imm);
3638 } IEM_MC_ENDIF();
3639 IEM_MC_END();
3640 }
3641 else
3642 {
3643 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3645
3646 IEM_MC_BEGIN(0, 0);
3647 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3648 IEM_MC_ADVANCE_RIP();
3649 } IEM_MC_ELSE() {
3650 IEM_MC_REL_JMP_S32(i32Imm);
3651 } IEM_MC_ENDIF();
3652 IEM_MC_END();
3653 }
3654 return VINF_SUCCESS;
3655}
3656
3657
3658/** Opcode 0x0f 0x84. */
3659FNIEMOP_DEF(iemOp_je_Jv)
3660{
3661 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
3662 IEMOP_HLP_MIN_386();
3663 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3664 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3665 {
3666 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3668
3669 IEM_MC_BEGIN(0, 0);
3670 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3671 IEM_MC_REL_JMP_S16(i16Imm);
3672 } IEM_MC_ELSE() {
3673 IEM_MC_ADVANCE_RIP();
3674 } IEM_MC_ENDIF();
3675 IEM_MC_END();
3676 }
3677 else
3678 {
3679 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3681
3682 IEM_MC_BEGIN(0, 0);
3683 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3684 IEM_MC_REL_JMP_S32(i32Imm);
3685 } IEM_MC_ELSE() {
3686 IEM_MC_ADVANCE_RIP();
3687 } IEM_MC_ENDIF();
3688 IEM_MC_END();
3689 }
3690 return VINF_SUCCESS;
3691}
3692
3693
3694/** Opcode 0x0f 0x85. */
3695FNIEMOP_DEF(iemOp_jne_Jv)
3696{
3697 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
3698 IEMOP_HLP_MIN_386();
3699 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3700 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3701 {
3702 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3704
3705 IEM_MC_BEGIN(0, 0);
3706 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3707 IEM_MC_ADVANCE_RIP();
3708 } IEM_MC_ELSE() {
3709 IEM_MC_REL_JMP_S16(i16Imm);
3710 } IEM_MC_ENDIF();
3711 IEM_MC_END();
3712 }
3713 else
3714 {
3715 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3717
3718 IEM_MC_BEGIN(0, 0);
3719 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3720 IEM_MC_ADVANCE_RIP();
3721 } IEM_MC_ELSE() {
3722 IEM_MC_REL_JMP_S32(i32Imm);
3723 } IEM_MC_ENDIF();
3724 IEM_MC_END();
3725 }
3726 return VINF_SUCCESS;
3727}
3728
3729
3730/** Opcode 0x0f 0x86. */
3731FNIEMOP_DEF(iemOp_jbe_Jv)
3732{
3733 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
3734 IEMOP_HLP_MIN_386();
3735 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3736 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3737 {
3738 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3740
3741 IEM_MC_BEGIN(0, 0);
3742 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3743 IEM_MC_REL_JMP_S16(i16Imm);
3744 } IEM_MC_ELSE() {
3745 IEM_MC_ADVANCE_RIP();
3746 } IEM_MC_ENDIF();
3747 IEM_MC_END();
3748 }
3749 else
3750 {
3751 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3753
3754 IEM_MC_BEGIN(0, 0);
3755 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3756 IEM_MC_REL_JMP_S32(i32Imm);
3757 } IEM_MC_ELSE() {
3758 IEM_MC_ADVANCE_RIP();
3759 } IEM_MC_ENDIF();
3760 IEM_MC_END();
3761 }
3762 return VINF_SUCCESS;
3763}
3764
3765
3766/** Opcode 0x0f 0x87. */
3767FNIEMOP_DEF(iemOp_jnbe_Jv)
3768{
3769 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
3770 IEMOP_HLP_MIN_386();
3771 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3772 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3773 {
3774 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3776
3777 IEM_MC_BEGIN(0, 0);
3778 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3779 IEM_MC_ADVANCE_RIP();
3780 } IEM_MC_ELSE() {
3781 IEM_MC_REL_JMP_S16(i16Imm);
3782 } IEM_MC_ENDIF();
3783 IEM_MC_END();
3784 }
3785 else
3786 {
3787 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3789
3790 IEM_MC_BEGIN(0, 0);
3791 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3792 IEM_MC_ADVANCE_RIP();
3793 } IEM_MC_ELSE() {
3794 IEM_MC_REL_JMP_S32(i32Imm);
3795 } IEM_MC_ENDIF();
3796 IEM_MC_END();
3797 }
3798 return VINF_SUCCESS;
3799}
3800
3801
3802/** Opcode 0x0f 0x88. */
3803FNIEMOP_DEF(iemOp_js_Jv)
3804{
3805 IEMOP_MNEMONIC(js_Jv, "js Jv");
3806 IEMOP_HLP_MIN_386();
3807 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3808 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3809 {
3810 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3812
3813 IEM_MC_BEGIN(0, 0);
3814 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3815 IEM_MC_REL_JMP_S16(i16Imm);
3816 } IEM_MC_ELSE() {
3817 IEM_MC_ADVANCE_RIP();
3818 } IEM_MC_ENDIF();
3819 IEM_MC_END();
3820 }
3821 else
3822 {
3823 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3825
3826 IEM_MC_BEGIN(0, 0);
3827 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3828 IEM_MC_REL_JMP_S32(i32Imm);
3829 } IEM_MC_ELSE() {
3830 IEM_MC_ADVANCE_RIP();
3831 } IEM_MC_ENDIF();
3832 IEM_MC_END();
3833 }
3834 return VINF_SUCCESS;
3835}
3836
3837
3838/** Opcode 0x0f 0x89. */
3839FNIEMOP_DEF(iemOp_jns_Jv)
3840{
3841 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
3842 IEMOP_HLP_MIN_386();
3843 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3844 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3845 {
3846 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3847 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3848
3849 IEM_MC_BEGIN(0, 0);
3850 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3851 IEM_MC_ADVANCE_RIP();
3852 } IEM_MC_ELSE() {
3853 IEM_MC_REL_JMP_S16(i16Imm);
3854 } IEM_MC_ENDIF();
3855 IEM_MC_END();
3856 }
3857 else
3858 {
3859 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3861
3862 IEM_MC_BEGIN(0, 0);
3863 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3864 IEM_MC_ADVANCE_RIP();
3865 } IEM_MC_ELSE() {
3866 IEM_MC_REL_JMP_S32(i32Imm);
3867 } IEM_MC_ENDIF();
3868 IEM_MC_END();
3869 }
3870 return VINF_SUCCESS;
3871}
3872
3873
3874/** Opcode 0x0f 0x8a. */
3875FNIEMOP_DEF(iemOp_jp_Jv)
3876{
3877 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
3878 IEMOP_HLP_MIN_386();
3879 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3880 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3881 {
3882 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3884
3885 IEM_MC_BEGIN(0, 0);
3886 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3887 IEM_MC_REL_JMP_S16(i16Imm);
3888 } IEM_MC_ELSE() {
3889 IEM_MC_ADVANCE_RIP();
3890 } IEM_MC_ENDIF();
3891 IEM_MC_END();
3892 }
3893 else
3894 {
3895 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3897
3898 IEM_MC_BEGIN(0, 0);
3899 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3900 IEM_MC_REL_JMP_S32(i32Imm);
3901 } IEM_MC_ELSE() {
3902 IEM_MC_ADVANCE_RIP();
3903 } IEM_MC_ENDIF();
3904 IEM_MC_END();
3905 }
3906 return VINF_SUCCESS;
3907}
3908
3909
3910/** Opcode 0x0f 0x8b. */
3911FNIEMOP_DEF(iemOp_jnp_Jv)
3912{
3913 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
3914 IEMOP_HLP_MIN_386();
3915 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3916 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3917 {
3918 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3920
3921 IEM_MC_BEGIN(0, 0);
3922 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3923 IEM_MC_ADVANCE_RIP();
3924 } IEM_MC_ELSE() {
3925 IEM_MC_REL_JMP_S16(i16Imm);
3926 } IEM_MC_ENDIF();
3927 IEM_MC_END();
3928 }
3929 else
3930 {
3931 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3933
3934 IEM_MC_BEGIN(0, 0);
3935 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3936 IEM_MC_ADVANCE_RIP();
3937 } IEM_MC_ELSE() {
3938 IEM_MC_REL_JMP_S32(i32Imm);
3939 } IEM_MC_ENDIF();
3940 IEM_MC_END();
3941 }
3942 return VINF_SUCCESS;
3943}
3944
3945
3946/** Opcode 0x0f 0x8c. */
3947FNIEMOP_DEF(iemOp_jl_Jv)
3948{
3949 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
3950 IEMOP_HLP_MIN_386();
3951 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3952 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3953 {
3954 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3956
3957 IEM_MC_BEGIN(0, 0);
3958 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3959 IEM_MC_REL_JMP_S16(i16Imm);
3960 } IEM_MC_ELSE() {
3961 IEM_MC_ADVANCE_RIP();
3962 } IEM_MC_ENDIF();
3963 IEM_MC_END();
3964 }
3965 else
3966 {
3967 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3969
3970 IEM_MC_BEGIN(0, 0);
3971 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3972 IEM_MC_REL_JMP_S32(i32Imm);
3973 } IEM_MC_ELSE() {
3974 IEM_MC_ADVANCE_RIP();
3975 } IEM_MC_ENDIF();
3976 IEM_MC_END();
3977 }
3978 return VINF_SUCCESS;
3979}
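
/* Why SF != OF means "signed less than" (background for the BITS_NE test
 * above): after CMP a,b computes a - b, SF is the sign of the truncated
 * result and OF says that sign is wrong because the subtraction overflowed,
 * so XORing the two recovers the true sign of the infinite-precision
 * difference. Sketch, with fEFlags as a stand-in for the live EFLAGS value:
 */
#if 0
bool const fLess = !!(fEFlags & X86_EFL_SF) != !!(fEFlags & X86_EFL_OF);
#endif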
3980
3981
3982/** Opcode 0x0f 0x8d. */
3983FNIEMOP_DEF(iemOp_jnl_Jv)
3984{
3985 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
3986 IEMOP_HLP_MIN_386();
3987 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3988 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3989 {
3990 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3992
3993 IEM_MC_BEGIN(0, 0);
3994 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3995 IEM_MC_ADVANCE_RIP();
3996 } IEM_MC_ELSE() {
3997 IEM_MC_REL_JMP_S16(i16Imm);
3998 } IEM_MC_ENDIF();
3999 IEM_MC_END();
4000 }
4001 else
4002 {
4003 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4005
4006 IEM_MC_BEGIN(0, 0);
4007 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4008 IEM_MC_ADVANCE_RIP();
4009 } IEM_MC_ELSE() {
4010 IEM_MC_REL_JMP_S32(i32Imm);
4011 } IEM_MC_ENDIF();
4012 IEM_MC_END();
4013 }
4014 return VINF_SUCCESS;
4015}
4016
4017
4018/** Opcode 0x0f 0x8e. */
4019FNIEMOP_DEF(iemOp_jle_Jv)
4020{
4021 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4022 IEMOP_HLP_MIN_386();
4023 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4024 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4025 {
4026 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4028
4029 IEM_MC_BEGIN(0, 0);
4030 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4031 IEM_MC_REL_JMP_S16(i16Imm);
4032 } IEM_MC_ELSE() {
4033 IEM_MC_ADVANCE_RIP();
4034 } IEM_MC_ENDIF();
4035 IEM_MC_END();
4036 }
4037 else
4038 {
4039 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4041
4042 IEM_MC_BEGIN(0, 0);
4043 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4044 IEM_MC_REL_JMP_S32(i32Imm);
4045 } IEM_MC_ELSE() {
4046 IEM_MC_ADVANCE_RIP();
4047 } IEM_MC_ENDIF();
4048 IEM_MC_END();
4049 }
4050 return VINF_SUCCESS;
4051}
4052
4053
4054/** Opcode 0x0f 0x8f. */
4055FNIEMOP_DEF(iemOp_jnle_Jv)
4056{
4057 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4058 IEMOP_HLP_MIN_386();
4059 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4060 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4061 {
4062 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4064
4065 IEM_MC_BEGIN(0, 0);
4066 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4067 IEM_MC_ADVANCE_RIP();
4068 } IEM_MC_ELSE() {
4069 IEM_MC_REL_JMP_S16(i16Imm);
4070 } IEM_MC_ENDIF();
4071 IEM_MC_END();
4072 }
4073 else
4074 {
4075 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4077
4078 IEM_MC_BEGIN(0, 0);
4079 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4080 IEM_MC_ADVANCE_RIP();
4081 } IEM_MC_ELSE() {
4082 IEM_MC_REL_JMP_S32(i32Imm);
4083 } IEM_MC_ENDIF();
4084 IEM_MC_END();
4085 }
4086 return VINF_SUCCESS;
4087}
4088
4089
4090/** Opcode 0x0f 0x90. */
4091FNIEMOP_DEF(iemOp_seto_Eb)
4092{
4093 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4094 IEMOP_HLP_MIN_386();
4095 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4096
4097 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4098 * any way. AMD says it's "unused", whatever that means. We're
4099 * ignoring for now. */
4100 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4101 {
4102 /* register target */
4103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4104 IEM_MC_BEGIN(0, 0);
4105 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4106 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4107 } IEM_MC_ELSE() {
4108 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4109 } IEM_MC_ENDIF();
4110 IEM_MC_ADVANCE_RIP();
4111 IEM_MC_END();
4112 }
4113 else
4114 {
4115 /* memory target */
4116 IEM_MC_BEGIN(0, 1);
4117 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4118 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4120 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4121 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4122 } IEM_MC_ELSE() {
4123 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4124 } IEM_MC_ENDIF();
4125 IEM_MC_ADVANCE_RIP();
4126 IEM_MC_END();
4127 }
4128 return VINF_SUCCESS;
4129}
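
/* The remaining SETcc handlers mirror the Jcc conditions one-for-one, but
 * materialize the predicate as a byte: the destination is always written,
 * with 1 when the condition holds and 0 otherwise, never any other value.
 * Sketch of the register form above (illustrative, fEFlags is a stand-in):
 */
#if 0
uint8_t const bValue = (fEFlags & X86_EFL_OF) ? 1 : 0;
#endif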
4130
4131
4132/** Opcode 0x0f 0x91. */
4133FNIEMOP_DEF(iemOp_setno_Eb)
4134{
4135 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4136 IEMOP_HLP_MIN_386();
4137 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4138
4139 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4140 * any way. AMD says it's "unused", whatever that means. We're
4141 * ignoring for now. */
4142 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4143 {
4144 /* register target */
4145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4146 IEM_MC_BEGIN(0, 0);
4147 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4148 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4149 } IEM_MC_ELSE() {
4150 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4151 } IEM_MC_ENDIF();
4152 IEM_MC_ADVANCE_RIP();
4153 IEM_MC_END();
4154 }
4155 else
4156 {
4157 /* memory target */
4158 IEM_MC_BEGIN(0, 1);
4159 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4162 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4163 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4164 } IEM_MC_ELSE() {
4165 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4166 } IEM_MC_ENDIF();
4167 IEM_MC_ADVANCE_RIP();
4168 IEM_MC_END();
4169 }
4170 return VINF_SUCCESS;
4171}
4172
4173
4174/** Opcode 0x0f 0x92. */
4175FNIEMOP_DEF(iemOp_setc_Eb)
4176{
4177 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4178 IEMOP_HLP_MIN_386();
4179 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4180
4181 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4182 * any way. AMD says it's "unused", whatever that means. We're
4183 * ignoring for now. */
4184 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4185 {
4186 /* register target */
4187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4188 IEM_MC_BEGIN(0, 0);
4189 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4190 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4191 } IEM_MC_ELSE() {
4192 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4193 } IEM_MC_ENDIF();
4194 IEM_MC_ADVANCE_RIP();
4195 IEM_MC_END();
4196 }
4197 else
4198 {
4199 /* memory target */
4200 IEM_MC_BEGIN(0, 1);
4201 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4204 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4205 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4206 } IEM_MC_ELSE() {
4207 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4208 } IEM_MC_ENDIF();
4209 IEM_MC_ADVANCE_RIP();
4210 IEM_MC_END();
4211 }
4212 return VINF_SUCCESS;
4213}
4214
4215
4216/** Opcode 0x0f 0x93. */
4217FNIEMOP_DEF(iemOp_setnc_Eb)
4218{
4219 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4220 IEMOP_HLP_MIN_386();
4221 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4222
4223 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4224 * any way. AMD says it's "unused", whatever that means. We're
4225 * ignoring for now. */
4226 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4227 {
4228 /* register target */
4229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4230 IEM_MC_BEGIN(0, 0);
4231 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4232 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4233 } IEM_MC_ELSE() {
4234 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4235 } IEM_MC_ENDIF();
4236 IEM_MC_ADVANCE_RIP();
4237 IEM_MC_END();
4238 }
4239 else
4240 {
4241 /* memory target */
4242 IEM_MC_BEGIN(0, 1);
4243 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4244 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4246 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4247 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4248 } IEM_MC_ELSE() {
4249 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4250 } IEM_MC_ENDIF();
4251 IEM_MC_ADVANCE_RIP();
4252 IEM_MC_END();
4253 }
4254 return VINF_SUCCESS;
4255}
4256
4257
4258/** Opcode 0x0f 0x94. */
4259FNIEMOP_DEF(iemOp_sete_Eb)
4260{
4261 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4262 IEMOP_HLP_MIN_386();
4263 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4264
4265 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4266 * any way. AMD says it's "unused", whatever that means. We're
4267 * ignoring for now. */
4268 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4269 {
4270 /* register target */
4271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4272 IEM_MC_BEGIN(0, 0);
4273 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4274 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4275 } IEM_MC_ELSE() {
4276 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4277 } IEM_MC_ENDIF();
4278 IEM_MC_ADVANCE_RIP();
4279 IEM_MC_END();
4280 }
4281 else
4282 {
4283 /* memory target */
4284 IEM_MC_BEGIN(0, 1);
4285 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4286 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4288 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4289 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4290 } IEM_MC_ELSE() {
4291 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4292 } IEM_MC_ENDIF();
4293 IEM_MC_ADVANCE_RIP();
4294 IEM_MC_END();
4295 }
4296 return VINF_SUCCESS;
4297}
4298
4299
4300/** Opcode 0x0f 0x95. */
4301FNIEMOP_DEF(iemOp_setne_Eb)
4302{
4303 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4304 IEMOP_HLP_MIN_386();
4305 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4306
4307 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4308 * any way. AMD says it's "unused", whatever that means. We're
4309 * ignoring for now. */
4310 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4311 {
4312 /* register target */
4313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4314 IEM_MC_BEGIN(0, 0);
4315 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4316 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4317 } IEM_MC_ELSE() {
4318 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4319 } IEM_MC_ENDIF();
4320 IEM_MC_ADVANCE_RIP();
4321 IEM_MC_END();
4322 }
4323 else
4324 {
4325 /* memory target */
4326 IEM_MC_BEGIN(0, 1);
4327 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4330 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4331 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4332 } IEM_MC_ELSE() {
4333 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4334 } IEM_MC_ENDIF();
4335 IEM_MC_ADVANCE_RIP();
4336 IEM_MC_END();
4337 }
4338 return VINF_SUCCESS;
4339}
4340
4341
4342/** Opcode 0x0f 0x96. */
4343FNIEMOP_DEF(iemOp_setbe_Eb)
4344{
4345 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4346 IEMOP_HLP_MIN_386();
4347 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4348
4349 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4350 * any way. AMD says it's "unused", whatever that means. We're
4351 * ignoring for now. */
4352 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4353 {
4354 /* register target */
4355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4356 IEM_MC_BEGIN(0, 0);
4357 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4358 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4359 } IEM_MC_ELSE() {
4360 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4361 } IEM_MC_ENDIF();
4362 IEM_MC_ADVANCE_RIP();
4363 IEM_MC_END();
4364 }
4365 else
4366 {
4367 /* memory target */
4368 IEM_MC_BEGIN(0, 1);
4369 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4370 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4372 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4373 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4374 } IEM_MC_ELSE() {
4375 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4376 } IEM_MC_ENDIF();
4377 IEM_MC_ADVANCE_RIP();
4378 IEM_MC_END();
4379 }
4380 return VINF_SUCCESS;
4381}
4382
4383
4384/** Opcode 0x0f 0x97. */
4385FNIEMOP_DEF(iemOp_setnbe_Eb)
4386{
4387 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4388 IEMOP_HLP_MIN_386();
4389 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4390
4391 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4392 * any way. AMD says it's "unused", whatever that means. We're
4393 * ignoring for now. */
4394 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4395 {
4396 /* register target */
4397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4398 IEM_MC_BEGIN(0, 0);
4399 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4400 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4401 } IEM_MC_ELSE() {
4402 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4403 } IEM_MC_ENDIF();
4404 IEM_MC_ADVANCE_RIP();
4405 IEM_MC_END();
4406 }
4407 else
4408 {
4409 /* memory target */
4410 IEM_MC_BEGIN(0, 1);
4411 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4412 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4414 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4415 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4416 } IEM_MC_ELSE() {
4417 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4418 } IEM_MC_ENDIF();
4419 IEM_MC_ADVANCE_RIP();
4420 IEM_MC_END();
4421 }
4422 return VINF_SUCCESS;
4423}
4424
4425
4426/** Opcode 0x0f 0x98. */
4427FNIEMOP_DEF(iemOp_sets_Eb)
4428{
4429 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
4430 IEMOP_HLP_MIN_386();
4431 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4432
4433 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4434 * any way. AMD says it's "unused", whatever that means. We're
4435 * ignoring for now. */
4436 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4437 {
4438 /* register target */
4439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4440 IEM_MC_BEGIN(0, 0);
4441 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4442 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4443 } IEM_MC_ELSE() {
4444 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4445 } IEM_MC_ENDIF();
4446 IEM_MC_ADVANCE_RIP();
4447 IEM_MC_END();
4448 }
4449 else
4450 {
4451 /* memory target */
4452 IEM_MC_BEGIN(0, 1);
4453 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4454 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4456 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4457 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4458 } IEM_MC_ELSE() {
4459 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4460 } IEM_MC_ENDIF();
4461 IEM_MC_ADVANCE_RIP();
4462 IEM_MC_END();
4463 }
4464 return VINF_SUCCESS;
4465}
4466
4467
4468/** Opcode 0x0f 0x99. */
4469FNIEMOP_DEF(iemOp_setns_Eb)
4470{
4471 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
4472 IEMOP_HLP_MIN_386();
4473 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4474
4475 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4476 * any way. AMD says it's "unused", whatever that means. We're
4477 * ignoring for now. */
4478 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4479 {
4480 /* register target */
4481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4482 IEM_MC_BEGIN(0, 0);
4483 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4484 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4485 } IEM_MC_ELSE() {
4486 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4487 } IEM_MC_ENDIF();
4488 IEM_MC_ADVANCE_RIP();
4489 IEM_MC_END();
4490 }
4491 else
4492 {
4493 /* memory target */
4494 IEM_MC_BEGIN(0, 1);
4495 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4496 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4498 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4499 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4500 } IEM_MC_ELSE() {
4501 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4502 } IEM_MC_ENDIF();
4503 IEM_MC_ADVANCE_RIP();
4504 IEM_MC_END();
4505 }
4506 return VINF_SUCCESS;
4507}
4508
4509
4510/** Opcode 0x0f 0x9a. */
4511FNIEMOP_DEF(iemOp_setp_Eb)
4512{
4513 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
4514 IEMOP_HLP_MIN_386();
4515 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4516
4517 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4518 * any way. AMD says it's "unused", whatever that means. We're
4519 * ignoring for now. */
4520 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4521 {
4522 /* register target */
4523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4524 IEM_MC_BEGIN(0, 0);
4525 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4526 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4527 } IEM_MC_ELSE() {
4528 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4529 } IEM_MC_ENDIF();
4530 IEM_MC_ADVANCE_RIP();
4531 IEM_MC_END();
4532 }
4533 else
4534 {
4535 /* memory target */
4536 IEM_MC_BEGIN(0, 1);
4537 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4538 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4540 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4541 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4542 } IEM_MC_ELSE() {
4543 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4544 } IEM_MC_ENDIF();
4545 IEM_MC_ADVANCE_RIP();
4546 IEM_MC_END();
4547 }
4548 return VINF_SUCCESS;
4549}
4550
4551
4552/** Opcode 0x0f 0x9b. */
4553FNIEMOP_DEF(iemOp_setnp_Eb)
4554{
4555 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
4556 IEMOP_HLP_MIN_386();
4557 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4558
4559 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4560 * any way. AMD says it's "unused", whatever that means. We're
4561 * ignoring for now. */
4562 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4563 {
4564 /* register target */
4565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4566 IEM_MC_BEGIN(0, 0);
4567 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4568 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4569 } IEM_MC_ELSE() {
4570 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4571 } IEM_MC_ENDIF();
4572 IEM_MC_ADVANCE_RIP();
4573 IEM_MC_END();
4574 }
4575 else
4576 {
4577 /* memory target */
4578 IEM_MC_BEGIN(0, 1);
4579 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4580 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4582 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4583 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4584 } IEM_MC_ELSE() {
4585 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4586 } IEM_MC_ENDIF();
4587 IEM_MC_ADVANCE_RIP();
4588 IEM_MC_END();
4589 }
4590 return VINF_SUCCESS;
4591}
4592
4593
4594/** Opcode 0x0f 0x9c. */
4595FNIEMOP_DEF(iemOp_setl_Eb)
4596{
4597 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
4598 IEMOP_HLP_MIN_386();
4599 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4600
4601 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4602 * any way. AMD says it's "unused", whatever that means. We're
4603 * ignoring for now. */
4604 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4605 {
4606 /* register target */
4607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4608 IEM_MC_BEGIN(0, 0);
4609 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4610 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4611 } IEM_MC_ELSE() {
4612 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4613 } IEM_MC_ENDIF();
4614 IEM_MC_ADVANCE_RIP();
4615 IEM_MC_END();
4616 }
4617 else
4618 {
4619 /* memory target */
4620 IEM_MC_BEGIN(0, 1);
4621 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4622 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4624 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4625 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4626 } IEM_MC_ELSE() {
4627 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4628 } IEM_MC_ENDIF();
4629 IEM_MC_ADVANCE_RIP();
4630 IEM_MC_END();
4631 }
4632 return VINF_SUCCESS;
4633}
4634
4635
4636/** Opcode 0x0f 0x9d. */
4637FNIEMOP_DEF(iemOp_setnl_Eb)
4638{
4639 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
4640 IEMOP_HLP_MIN_386();
4641 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4642
4643 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4644 * any way. AMD says it's "unused", whatever that means. We're
4645 * ignoring for now. */
4646 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4647 {
4648 /* register target */
4649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4650 IEM_MC_BEGIN(0, 0);
4651 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4652 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4653 } IEM_MC_ELSE() {
4654 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4655 } IEM_MC_ENDIF();
4656 IEM_MC_ADVANCE_RIP();
4657 IEM_MC_END();
4658 }
4659 else
4660 {
4661 /* memory target */
4662 IEM_MC_BEGIN(0, 1);
4663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4664 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4666 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4667 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4668 } IEM_MC_ELSE() {
4669 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4670 } IEM_MC_ENDIF();
4671 IEM_MC_ADVANCE_RIP();
4672 IEM_MC_END();
4673 }
4674 return VINF_SUCCESS;
4675}
4676
4677
4678/** Opcode 0x0f 0x9e. */
4679FNIEMOP_DEF(iemOp_setle_Eb)
4680{
4681 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
4682 IEMOP_HLP_MIN_386();
4683 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4684
4685 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4686 * any way. AMD says it's "unused", whatever that means. We're
4687 * ignoring for now. */
4688 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4689 {
4690 /* register target */
4691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4692 IEM_MC_BEGIN(0, 0);
4693 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4694 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4695 } IEM_MC_ELSE() {
4696 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4697 } IEM_MC_ENDIF();
4698 IEM_MC_ADVANCE_RIP();
4699 IEM_MC_END();
4700 }
4701 else
4702 {
4703 /* memory target */
4704 IEM_MC_BEGIN(0, 1);
4705 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4706 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4708 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4709 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4710 } IEM_MC_ELSE() {
4711 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4712 } IEM_MC_ENDIF();
4713 IEM_MC_ADVANCE_RIP();
4714 IEM_MC_END();
4715 }
4716 return VINF_SUCCESS;
4717}
4718
4719
4720/** Opcode 0x0f 0x9f. */
4721FNIEMOP_DEF(iemOp_setnle_Eb)
4722{
4723 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
4724 IEMOP_HLP_MIN_386();
4725 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4726
4727 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4728 * any way. AMD says it's "unused", whatever that means. We're
4729 * ignoring for now. */
4730 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4731 {
4732 /* register target */
4733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4734 IEM_MC_BEGIN(0, 0);
4735 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4736 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4737 } IEM_MC_ELSE() {
4738 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4739 } IEM_MC_ENDIF();
4740 IEM_MC_ADVANCE_RIP();
4741 IEM_MC_END();
4742 }
4743 else
4744 {
4745 /* memory target */
4746 IEM_MC_BEGIN(0, 1);
4747 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4750 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4751 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4752 } IEM_MC_ELSE() {
4753 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4754 } IEM_MC_ENDIF();
4755 IEM_MC_ADVANCE_RIP();
4756 IEM_MC_END();
4757 }
4758 return VINF_SUCCESS;
4759}
4760
4761
4762/**
4763 * Common 'push segment-register' helper.
4764 */
4765FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
4766{
4767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4768 if (iReg < X86_SREG_FS)
4769 IEMOP_HLP_NO_64BIT();
4770 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4771
4772 switch (pVCpu->iem.s.enmEffOpSize)
4773 {
4774 case IEMMODE_16BIT:
4775 IEM_MC_BEGIN(0, 1);
4776 IEM_MC_LOCAL(uint16_t, u16Value);
4777 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
4778 IEM_MC_PUSH_U16(u16Value);
4779 IEM_MC_ADVANCE_RIP();
4780 IEM_MC_END();
4781 break;
4782
4783 case IEMMODE_32BIT:
4784 IEM_MC_BEGIN(0, 1);
4785 IEM_MC_LOCAL(uint32_t, u32Value);
4786 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
4787 IEM_MC_PUSH_U32_SREG(u32Value);
4788 IEM_MC_ADVANCE_RIP();
4789 IEM_MC_END();
4790 break;
4791
4792 case IEMMODE_64BIT:
4793 IEM_MC_BEGIN(0, 1);
4794 IEM_MC_LOCAL(uint64_t, u64Value);
4795 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
4796 IEM_MC_PUSH_U64(u64Value);
4797 IEM_MC_ADVANCE_RIP();
4798 IEM_MC_END();
4799 break;
4800 }
4801
4802 return VINF_SUCCESS;
4803}
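
/* Note on IEM_MC_PUSH_U32_SREG above: per the SDM, recent CPUs doing a 32-bit
 * push of a segment register write only the low 16 bits of the stack slot and
 * leave the upper half unmodified, which is why a dedicated statement exists
 * instead of plain IEM_MC_PUSH_U32. A sketch of the difference, assuming a
 * hypothetical pre-decremented stack pointer uNewEsp (an observation from the
 * manuals, not a quote of the statement's implementation):
 */
#if 0
/* plain PUSH_U32: full dword store */
iemMemStoreDataU32(pVCpu, X86_SREG_SS, uNewEsp, u32Value);
/* PUSH_U32_SREG on recent CPUs: word store only, upper bytes untouched */
iemMemStoreDataU16(pVCpu, X86_SREG_SS, uNewEsp, (uint16_t)u32Value);
#endif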
4804
4805
4806/** Opcode 0x0f 0xa0. */
4807FNIEMOP_DEF(iemOp_push_fs)
4808{
4809 IEMOP_MNEMONIC(push_fs, "push fs");
4810 IEMOP_HLP_MIN_386();
4811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4812 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
4813}
4814
4815
4816/** Opcode 0x0f 0xa1. */
4817FNIEMOP_DEF(iemOp_pop_fs)
4818{
4819 IEMOP_MNEMONIC(pop_fs, "pop fs");
4820 IEMOP_HLP_MIN_386();
4821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4822 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
4823}
4824
4825
4826/** Opcode 0x0f 0xa2. */
4827FNIEMOP_DEF(iemOp_cpuid)
4828{
4829 IEMOP_MNEMONIC(cpuid, "cpuid");
4830 IEMOP_HLP_MIN_486(); /* not all 486es. */
4831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4832 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
4833}
4834
4835
4836/**
4837 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4838 * iemOp_bts_Ev_Gv.
4839 */
4840FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4841{
4842 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4843 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4844
4845 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4846 {
4847 /* register destination. */
4848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4849 switch (pVCpu->iem.s.enmEffOpSize)
4850 {
4851 case IEMMODE_16BIT:
4852 IEM_MC_BEGIN(3, 0);
4853 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4854 IEM_MC_ARG(uint16_t, u16Src, 1);
4855 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4856
4857 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4858 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4859 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4860 IEM_MC_REF_EFLAGS(pEFlags);
4861 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4862
4863 IEM_MC_ADVANCE_RIP();
4864 IEM_MC_END();
4865 return VINF_SUCCESS;
4866
4867 case IEMMODE_32BIT:
4868 IEM_MC_BEGIN(3, 0);
4869 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4870 IEM_MC_ARG(uint32_t, u32Src, 1);
4871 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4872
4873 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4874 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4875 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4876 IEM_MC_REF_EFLAGS(pEFlags);
4877 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4878
4879 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4880 IEM_MC_ADVANCE_RIP();
4881 IEM_MC_END();
4882 return VINF_SUCCESS;
4883
4884 case IEMMODE_64BIT:
4885 IEM_MC_BEGIN(3, 0);
4886 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4887 IEM_MC_ARG(uint64_t, u64Src, 1);
4888 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4889
4890 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4891 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4892 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4893 IEM_MC_REF_EFLAGS(pEFlags);
4894 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4895
4896 IEM_MC_ADVANCE_RIP();
4897 IEM_MC_END();
4898 return VINF_SUCCESS;
4899
4900 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4901 }
4902 }
4903 else
4904 {
4905 /* memory destination. */
4906
4907 uint32_t fAccess;
4908 if (pImpl->pfnLockedU16)
4909 fAccess = IEM_ACCESS_DATA_RW;
4910 else /* BT */
4911 fAccess = IEM_ACCESS_DATA_R;
4912
4913 NOREF(fAccess);
4914
4915 /** @todo test negative bit offsets! */
4916 switch (pVCpu->iem.s.enmEffOpSize)
4917 {
4918 case IEMMODE_16BIT:
4919 IEM_MC_BEGIN(3, 2);
4920 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4921 IEM_MC_ARG(uint16_t, u16Src, 1);
4922 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4923 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4924 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4925
4926 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4927 if (pImpl->pfnLockedU16)
4928 IEMOP_HLP_DONE_DECODING();
4929 else
4930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4931 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4932 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4933 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4934 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4935 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
4936 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4937 IEM_MC_FETCH_EFLAGS(EFlags);
4938
4939 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4940 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
4941 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4942 else
4943 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4944 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4945
4946 IEM_MC_COMMIT_EFLAGS(EFlags);
4947 IEM_MC_ADVANCE_RIP();
4948 IEM_MC_END();
4949 return VINF_SUCCESS;
4950
4951 case IEMMODE_32BIT:
4952 IEM_MC_BEGIN(3, 2);
4953 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4954 IEM_MC_ARG(uint32_t, u32Src, 1);
4955 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4956 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4957 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4958
4959 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4960 if (pImpl->pfnLockedU16)
4961 IEMOP_HLP_DONE_DECODING();
4962 else
4963 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4964 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4965 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4966 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4967 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4968 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4969 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4970 IEM_MC_FETCH_EFLAGS(EFlags);
4971
4972 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4973 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
4974 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4975 else
4976 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4977 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4978
4979 IEM_MC_COMMIT_EFLAGS(EFlags);
4980 IEM_MC_ADVANCE_RIP();
4981 IEM_MC_END();
4982 return VINF_SUCCESS;
4983
4984 case IEMMODE_64BIT:
4985 IEM_MC_BEGIN(3, 2);
4986 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4987 IEM_MC_ARG(uint64_t, u64Src, 1);
4988 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4989 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4990 IEM_MC_LOCAL(int64_t, i64AddrAdj);
4991
4992 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4993 if (pImpl->pfnLockedU16)
4994 IEMOP_HLP_DONE_DECODING();
4995 else
4996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4997 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4998 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
4999 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5000 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5001 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5002 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5003 IEM_MC_FETCH_EFLAGS(EFlags);
5004
5005 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5006 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5007 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5008 else
5009 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5010 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5011
5012 IEM_MC_COMMIT_EFLAGS(EFlags);
5013 IEM_MC_ADVANCE_RIP();
5014 IEM_MC_END();
5015 return VINF_SUCCESS;
5016
5017 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5018 }
5019 }
5020}
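
/* What the SAR/SHL pair above computes for the memory forms: with a memory
 * operand the bit offset in Gv is a signed index relative to [Ev], so the
 * effective address is displaced by (offset >> log2(bits)) operand-size units
 * before the bit within that unit is tested. 16-bit sketch (illustrative
 * only, mirroring the microcode statements rather than reimplementing them):
 */
#if 0
int16_t const i16BitOff = (int16_t)u16Src;
GCPtrEffDst += (int16_t)(i16BitOff >> 4) * 2;   /* signed word index -> bytes */
unsigned const iBitInWord = i16BitOff & 15;     /* what the AND_ARG_U16 keeps */
#endif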
5021
5022
5023/** Opcode 0x0f 0xa3. */
5024FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5025{
5026 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5027 IEMOP_HLP_MIN_386();
5028 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5029}
5030
5031
5032/**
5033 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5034 */
5035FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5036{
5037 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5038 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5039
5040 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5041 {
5042 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5044
5045 switch (pVCpu->iem.s.enmEffOpSize)
5046 {
5047 case IEMMODE_16BIT:
5048 IEM_MC_BEGIN(4, 0);
5049 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5050 IEM_MC_ARG(uint16_t, u16Src, 1);
5051 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5052 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5053
5054 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5055 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5056 IEM_MC_REF_EFLAGS(pEFlags);
5057 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5058
5059 IEM_MC_ADVANCE_RIP();
5060 IEM_MC_END();
5061 return VINF_SUCCESS;
5062
5063 case IEMMODE_32BIT:
5064 IEM_MC_BEGIN(4, 0);
5065 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5066 IEM_MC_ARG(uint32_t, u32Src, 1);
5067 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5068 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5069
5070 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5071 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5072 IEM_MC_REF_EFLAGS(pEFlags);
5073 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5074
5075 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5076 IEM_MC_ADVANCE_RIP();
5077 IEM_MC_END();
5078 return VINF_SUCCESS;
5079
5080 case IEMMODE_64BIT:
5081 IEM_MC_BEGIN(4, 0);
5082 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5083 IEM_MC_ARG(uint64_t, u64Src, 1);
5084 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5085 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5086
5087 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5088 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5089 IEM_MC_REF_EFLAGS(pEFlags);
5090 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5091
5092 IEM_MC_ADVANCE_RIP();
5093 IEM_MC_END();
5094 return VINF_SUCCESS;
5095
5096 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5097 }
5098 }
5099 else
5100 {
5101 switch (pVCpu->iem.s.enmEffOpSize)
5102 {
5103 case IEMMODE_16BIT:
5104 IEM_MC_BEGIN(4, 2);
5105 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5106 IEM_MC_ARG(uint16_t, u16Src, 1);
5107 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5108 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5109 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5110
5111 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5112 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5113 IEM_MC_ASSIGN(cShiftArg, cShift);
5114 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5115 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5116 IEM_MC_FETCH_EFLAGS(EFlags);
5117 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5118 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5119
5120 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5121 IEM_MC_COMMIT_EFLAGS(EFlags);
5122 IEM_MC_ADVANCE_RIP();
5123 IEM_MC_END();
5124 return VINF_SUCCESS;
5125
5126 case IEMMODE_32BIT:
5127 IEM_MC_BEGIN(4, 2);
5128 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5129 IEM_MC_ARG(uint32_t, u32Src, 1);
5130 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5131 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5132 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5133
5134 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5135 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5136 IEM_MC_ASSIGN(cShiftArg, cShift);
5137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5138 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5139 IEM_MC_FETCH_EFLAGS(EFlags);
5140 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5141 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5142
5143 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5144 IEM_MC_COMMIT_EFLAGS(EFlags);
5145 IEM_MC_ADVANCE_RIP();
5146 IEM_MC_END();
5147 return VINF_SUCCESS;
5148
5149 case IEMMODE_64BIT:
5150 IEM_MC_BEGIN(4, 2);
5151 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5152 IEM_MC_ARG(uint64_t, u64Src, 1);
5153 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5154 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5155 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5156
5157 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5158 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5159 IEM_MC_ASSIGN(cShiftArg, cShift);
5160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5161 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5162 IEM_MC_FETCH_EFLAGS(EFlags);
5163 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5164 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5165
5166 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5167 IEM_MC_COMMIT_EFLAGS(EFlags);
5168 IEM_MC_ADVANCE_RIP();
5169 IEM_MC_END();
5170 return VINF_SUCCESS;
5171
5172 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5173 }
5174 }
5175}
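
/* Rough semantics of the double-precision shift workers invoked above, shown
 * for the 32-bit SHLD case (illustrative; the real assembly workers also
 * update EFLAGS and pin down corner cases the manuals leave undefined):
 */
#if 0
uint8_t const cMasked = cShift & 31;    /* the count is masked modulo 32 */
if (cMasked)
    *pu32Dst = (*pu32Dst << cMasked) | (u32Src >> (32 - cMasked));
#endif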
5176
5177
5178/**
5179 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5180 */
5181FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5182{
5183 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5184 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5185
5186 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5187 {
5188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5189
5190 switch (pVCpu->iem.s.enmEffOpSize)
5191 {
5192 case IEMMODE_16BIT:
5193 IEM_MC_BEGIN(4, 0);
5194 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5195 IEM_MC_ARG(uint16_t, u16Src, 1);
5196 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5197 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5198
5199 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5200 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5201 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5202 IEM_MC_REF_EFLAGS(pEFlags);
5203 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5204
5205 IEM_MC_ADVANCE_RIP();
5206 IEM_MC_END();
5207 return VINF_SUCCESS;
5208
5209 case IEMMODE_32BIT:
5210 IEM_MC_BEGIN(4, 0);
5211 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5212 IEM_MC_ARG(uint32_t, u32Src, 1);
5213 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5214 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5215
5216 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5217 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5218 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5219 IEM_MC_REF_EFLAGS(pEFlags);
5220 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5221
5222 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5223 IEM_MC_ADVANCE_RIP();
5224 IEM_MC_END();
5225 return VINF_SUCCESS;
5226
5227 case IEMMODE_64BIT:
5228 IEM_MC_BEGIN(4, 0);
5229 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5230 IEM_MC_ARG(uint64_t, u64Src, 1);
5231 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5232 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5233
5234 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5235 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5236 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5237 IEM_MC_REF_EFLAGS(pEFlags);
5238 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5239
5240 IEM_MC_ADVANCE_RIP();
5241 IEM_MC_END();
5242 return VINF_SUCCESS;
5243
5244 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5245 }
5246 }
5247 else
5248 {
5249 switch (pVCpu->iem.s.enmEffOpSize)
5250 {
5251 case IEMMODE_16BIT:
5252 IEM_MC_BEGIN(4, 2);
5253 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5254 IEM_MC_ARG(uint16_t, u16Src, 1);
5255 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5256 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5257 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5258
5259 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5261 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5262 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5263 IEM_MC_FETCH_EFLAGS(EFlags);
5264 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5265 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5266
5267 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5268 IEM_MC_COMMIT_EFLAGS(EFlags);
5269 IEM_MC_ADVANCE_RIP();
5270 IEM_MC_END();
5271 return VINF_SUCCESS;
5272
5273 case IEMMODE_32BIT:
5274 IEM_MC_BEGIN(4, 2);
5275 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5276 IEM_MC_ARG(uint32_t, u32Src, 1);
5277 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5278 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5279 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5280
5281 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5282 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5283 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5284 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5285 IEM_MC_FETCH_EFLAGS(EFlags);
5286 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5287 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5288
5289 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5290 IEM_MC_COMMIT_EFLAGS(EFlags);
5291 IEM_MC_ADVANCE_RIP();
5292 IEM_MC_END();
5293 return VINF_SUCCESS;
5294
5295 case IEMMODE_64BIT:
5296 IEM_MC_BEGIN(4, 2);
5297 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5298 IEM_MC_ARG(uint64_t, u64Src, 1);
5299 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5300 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5301 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5302
5303 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5305 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5306 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5307 IEM_MC_FETCH_EFLAGS(EFlags);
5308 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5309 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5310
5311 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5312 IEM_MC_COMMIT_EFLAGS(EFlags);
5313 IEM_MC_ADVANCE_RIP();
5314 IEM_MC_END();
5315 return VINF_SUCCESS;
5316
5317 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5318 }
5319 }
5320}
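
/* A rough model of what the pfnNormalUxx workers compute for SHLD (SHRD
 * being the mirror image), assuming the usual count masking (mod 32, or
 * mod 64 for the 64-bit operand size):
 *     c = cShift & 31;   // & 63 for the U64 variant
 *     if (c) *puDst = (*puDst << c) | (uSrc >> (cBits - c));
 * Flag updates and the undefined cases (count exceeding the operand width
 * for 16-bit operands) are left entirely to the assembly helpers.
 */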
5321
5322
5323
5324/** Opcode 0x0f 0xa4. */
5325FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5326{
5327 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5328 IEMOP_HLP_MIN_386();
5329 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5330}
5331
5332
5333/** Opcode 0x0f 0xa5. */
5334FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5335{
5336 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5337 IEMOP_HLP_MIN_386();
5338 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5339}
5340
5341
5342/** Opcode 0x0f 0xa8. */
5343FNIEMOP_DEF(iemOp_push_gs)
5344{
5345 IEMOP_MNEMONIC(push_gs, "push gs");
5346 IEMOP_HLP_MIN_386();
5347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5348 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5349}
5350
5351
5352/** Opcode 0x0f 0xa9. */
5353FNIEMOP_DEF(iemOp_pop_gs)
5354{
5355 IEMOP_MNEMONIC(pop_gs, "pop gs");
5356 IEMOP_HLP_MIN_386();
5357 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5358 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5359}
5360
5361
5362/** Opcode 0x0f 0xaa. */
5363FNIEMOP_STUB(iemOp_rsm);
5364//IEMOP_HLP_MIN_386();
5365
5366
5367/** Opcode 0x0f 0xab. */
5368FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5369{
5370 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5371 IEMOP_HLP_MIN_386();
5372 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5373}
5374
5375
5376/** Opcode 0x0f 0xac. */
5377FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5378{
5379 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5380 IEMOP_HLP_MIN_386();
5381 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5382}
5383
5384
5385/** Opcode 0x0f 0xad. */
5386FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5387{
5388 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5389 IEMOP_HLP_MIN_386();
5390 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5391}
5392
5393
5394/** Opcode 0x0f 0xae mem/0. */
5395FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5396{
5397 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5398 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5399 return IEMOP_RAISE_INVALID_OPCODE();
5400
5401 IEM_MC_BEGIN(3, 1);
5402 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5403 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5404 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5405 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5407 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5408 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5409 IEM_MC_END();
5410 return VINF_SUCCESS;
5411}
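
/* Guest-side usage sketch (hypothetical code, merely illustrating the
 * alignment requirement of the m512 operand):
 *     sub    rsp, 512 + 15
 *     and    rsp, -16          ; fxsave #GPs on unaligned operands
 *     fxsave [rsp]
 * The alignment and CR0/CR4 checks are presumably done by iemCImpl_fxsave
 * itself, as this decoder wrapper only computes the effective address.
 */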
5412
5413
5414/** Opcode 0x0f 0xae mem/1. */
5415FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5416{
5417 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5418 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5419 return IEMOP_RAISE_INVALID_OPCODE();
5420
5421 IEM_MC_BEGIN(3, 1);
5422 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5423 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5424 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5427 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5428 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5429 IEM_MC_END();
5430 return VINF_SUCCESS;
5431}
5432
5433
5434/** Opcode 0x0f 0xae mem/2. */
5435FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);
5436
5437/** Opcode 0x0f 0xae mem/3. */
5438FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);
5439
5440/** Opcode 0x0f 0xae mem/4. */
5441FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);
5442
5443/** Opcode 0x0f 0xae mem/5. */
5444FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);
5445
5446/** Opcode 0x0f 0xae mem/6. */
5447FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
5448
5449/** Opcode 0x0f 0xae mem/7. */
5450FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5451
5452
5453/** Opcode 0x0f 0xae 11b/5. */
5454FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
5455{
5456 RT_NOREF_PV(bRm);
5457 IEMOP_MNEMONIC(lfence, "lfence");
5458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5459 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5460 return IEMOP_RAISE_INVALID_OPCODE();
5461
5462 IEM_MC_BEGIN(0, 0);
5463 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5464 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
5465 else
5466 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5467 IEM_MC_ADVANCE_RIP();
5468 IEM_MC_END();
5469 return VINF_SUCCESS;
5470}
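
/* All three fences (this lfence, plus mfence and sfence below) follow the
 * same pattern: the guest CPU profile must report SSE2 or we raise #UD,
 * while on the host side the real fence instruction is only used when the
 * host actually has SSE2; iemAImpl_alt_mem_fence otherwise supplies an
 * equivalent ordering barrier (presumably a locked operation, but that is
 * an implementation detail of the assembly helper).
 */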
5471
5472
5473/** Opcode 0x0f 0xae 11b/6. */
5474FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
5475{
5476 RT_NOREF_PV(bRm);
5477 IEMOP_MNEMONIC(mfence, "mfence");
5478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5479 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5480 return IEMOP_RAISE_INVALID_OPCODE();
5481
5482 IEM_MC_BEGIN(0, 0);
5483 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5484 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
5485 else
5486 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5487 IEM_MC_ADVANCE_RIP();
5488 IEM_MC_END();
5489 return VINF_SUCCESS;
5490}
5491
5492
5493/** Opcode 0x0f 0xae 11b/7. */
5494FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
5495{
5496 RT_NOREF_PV(bRm);
5497 IEMOP_MNEMONIC(sfence, "sfence");
5498 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5499 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5500 return IEMOP_RAISE_INVALID_OPCODE();
5501
5502 IEM_MC_BEGIN(0, 0);
5503 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5504 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
5505 else
5506 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5507 IEM_MC_ADVANCE_RIP();
5508 IEM_MC_END();
5509 return VINF_SUCCESS;
5510}
5511
5512
5513/** Opcode 0xf3 0x0f 0xae 11b/0. */
5514FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
5515
5516/** Opcode 0xf3 0x0f 0xae 11b/1. */
5517FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
5518
5519/** Opcode 0xf3 0x0f 0xae 11b/2. */
5520FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
5521
5522/** Opcode 0xf3 0x0f 0xae 11b/3. */
5523FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5524
5525
5526/** Opcode 0x0f 0xae. */
5527FNIEMOP_DEF(iemOp_Grp15)
5528{
5529 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
5530 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5531 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
5532 {
5533 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5534 {
5535 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
5536 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
5537 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
5538 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
5539 case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
5540 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
5541 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt, bRm);
5542 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
5543 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5544 }
5545 }
5546 else
5547 {
5548 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
5549 {
5550 case 0:
5551 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5552 {
5553 case 0: return IEMOP_RAISE_INVALID_OPCODE();
5554 case 1: return IEMOP_RAISE_INVALID_OPCODE();
5555 case 2: return IEMOP_RAISE_INVALID_OPCODE();
5556 case 3: return IEMOP_RAISE_INVALID_OPCODE();
5557 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5558 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
5559 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
5560 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
5561 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5562 }
5563 break;
5564
5565 case IEM_OP_PRF_REPZ:
5566 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5567 {
5568 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
5569 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
5570 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
5571 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
5572 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5573 case 5: return IEMOP_RAISE_INVALID_OPCODE();
5574 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5575 case 7: return IEMOP_RAISE_INVALID_OPCODE();
5576 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5577 }
5578 break;
5579
5580 default:
5581 return IEMOP_RAISE_INVALID_OPCODE();
5582 }
5583 }
5584}
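
/* Encoding refresher for the register forms dispatched above: with mod=11b
 * the reg field selects the instruction, so lfence is 0f ae e8..ef (/5),
 * mfence 0f ae f0..f7 (/6) and sfence 0f ae f8..ff (/7); the rm bits are
 * ignored, which is why the workers RT_NOREF their bRm argument.
 */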
5585
5586
5587/** Opcode 0x0f 0xaf. */
5588FNIEMOP_DEF(iemOp_imul_Gv_Ev)
5589{
5590 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
5591 IEMOP_HLP_MIN_386();
5592 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5593 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
5594}
5595
5596
5597/** Opcode 0x0f 0xb0. */
5598FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
5599{
5600 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
5601 IEMOP_HLP_MIN_486();
5602 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5603
5604 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5605 {
5606 IEMOP_HLP_DONE_DECODING();
5607 IEM_MC_BEGIN(4, 0);
5608 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5609 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5610 IEM_MC_ARG(uint8_t, u8Src, 2);
5611 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5612
5613 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5614 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5615 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
5616 IEM_MC_REF_EFLAGS(pEFlags);
5617 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5618 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5619 else
5620 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5621
5622 IEM_MC_ADVANCE_RIP();
5623 IEM_MC_END();
5624 }
5625 else
5626 {
5627 IEM_MC_BEGIN(4, 3);
5628 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5629 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5630 IEM_MC_ARG(uint8_t, u8Src, 2);
5631 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5632 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5633 IEM_MC_LOCAL(uint8_t, u8Al);
5634
5635 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5636 IEMOP_HLP_DONE_DECODING();
5637 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5638 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5639 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
5640 IEM_MC_FETCH_EFLAGS(EFlags);
5641 IEM_MC_REF_LOCAL(pu8Al, u8Al);
5642 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5643 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5644 else
5645 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5646
5647 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
5648 IEM_MC_COMMIT_EFLAGS(EFlags);
5649 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
5650 IEM_MC_ADVANCE_RIP();
5651 IEM_MC_END();
5652 }
5653 return VINF_SUCCESS;
5654}
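
/* CMPXCHG r/m8, r8 in pseudo code:
 *     if (AL == dst) { ZF = 1; dst = u8Src; }
 *     else           { ZF = 0; AL  = dst;   }
 * Real CPUs perform the write cycle regardless of the comparison result,
 * which is why the memory variant maps the operand read-write and stores
 * AL back unconditionally.
 */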
5655
5656/** Opcode 0x0f 0xb1. */
5657FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
5658{
5659 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
5660 IEMOP_HLP_MIN_486();
5661 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5662
5663 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5664 {
5665 IEMOP_HLP_DONE_DECODING();
5666 switch (pVCpu->iem.s.enmEffOpSize)
5667 {
5668 case IEMMODE_16BIT:
5669 IEM_MC_BEGIN(4, 0);
5670 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5671 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5672 IEM_MC_ARG(uint16_t, u16Src, 2);
5673 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5674
5675 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5676 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5677 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
5678 IEM_MC_REF_EFLAGS(pEFlags);
5679 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5680 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5681 else
5682 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5683
5684 IEM_MC_ADVANCE_RIP();
5685 IEM_MC_END();
5686 return VINF_SUCCESS;
5687
5688 case IEMMODE_32BIT:
5689 IEM_MC_BEGIN(4, 0);
5690 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5691 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5692 IEM_MC_ARG(uint32_t, u32Src, 2);
5693 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5694
5695 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5696 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5697 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
5698 IEM_MC_REF_EFLAGS(pEFlags);
5699 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5700 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5701 else
5702 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5703
5704 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
5705 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5706 IEM_MC_ADVANCE_RIP();
5707 IEM_MC_END();
5708 return VINF_SUCCESS;
5709
5710 case IEMMODE_64BIT:
5711 IEM_MC_BEGIN(4, 0);
5712 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5713 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5714#ifdef RT_ARCH_X86
5715 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5716#else
5717 IEM_MC_ARG(uint64_t, u64Src, 2);
5718#endif
5719 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5720
5721 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5722 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
5723 IEM_MC_REF_EFLAGS(pEFlags);
5724#ifdef RT_ARCH_X86
5725 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5726 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5727 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5728 else
5729 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5730#else
5731 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5732 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5733 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5734 else
5735 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5736#endif
5737
5738 IEM_MC_ADVANCE_RIP();
5739 IEM_MC_END();
5740 return VINF_SUCCESS;
5741
5742 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5743 }
5744 }
5745 else
5746 {
5747 switch (pVCpu->iem.s.enmEffOpSize)
5748 {
5749 case IEMMODE_16BIT:
5750 IEM_MC_BEGIN(4, 3);
5751 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5752 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5753 IEM_MC_ARG(uint16_t, u16Src, 2);
5754 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5755 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5756 IEM_MC_LOCAL(uint16_t, u16Ax);
5757
5758 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5759 IEMOP_HLP_DONE_DECODING();
5760 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5761 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5762 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
5763 IEM_MC_FETCH_EFLAGS(EFlags);
5764 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
5765 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5766 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5767 else
5768 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5769
5770 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5771 IEM_MC_COMMIT_EFLAGS(EFlags);
5772 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
5773 IEM_MC_ADVANCE_RIP();
5774 IEM_MC_END();
5775 return VINF_SUCCESS;
5776
5777 case IEMMODE_32BIT:
5778 IEM_MC_BEGIN(4, 3);
5779 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5780 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5781 IEM_MC_ARG(uint32_t, u32Src, 2);
5782 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5783 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5784 IEM_MC_LOCAL(uint32_t, u32Eax);
5785
5786 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5787 IEMOP_HLP_DONE_DECODING();
5788 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5789 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5790 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
5791 IEM_MC_FETCH_EFLAGS(EFlags);
5792 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
5793 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5794 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5795 else
5796 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5797
5798 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5799 IEM_MC_COMMIT_EFLAGS(EFlags);
5800 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
5801 IEM_MC_ADVANCE_RIP();
5802 IEM_MC_END();
5803 return VINF_SUCCESS;
5804
5805 case IEMMODE_64BIT:
5806 IEM_MC_BEGIN(4, 3);
5807 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5808 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5809#ifdef RT_ARCH_X86
5810 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5811#else
5812 IEM_MC_ARG(uint64_t, u64Src, 2);
5813#endif
5814 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5815 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5816 IEM_MC_LOCAL(uint64_t, u64Rax);
5817
5818 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5819 IEMOP_HLP_DONE_DECODING();
5820 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5821 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
5822 IEM_MC_FETCH_EFLAGS(EFlags);
5823 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
5824#ifdef RT_ARCH_X86
5825 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5826 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5827 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5828 else
5829 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5830#else
5831 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5832 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5833 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5834 else
5835 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5836#endif
5837
5838 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5839 IEM_MC_COMMIT_EFLAGS(EFlags);
5840 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
5841 IEM_MC_ADVANCE_RIP();
5842 IEM_MC_END();
5843 return VINF_SUCCESS;
5844
5845 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5846 }
5847 }
5848}
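
/* The RT_ARCH_X86 special casing above exists because a 64-bit source
 * value cannot conveniently be passed by value to the assembly worker on
 * a 32-bit host, so it is handed over by reference instead (an assumption
 * based on the calling convention; the two branches are otherwise
 * equivalent).
 */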
5849
5850
5851FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
5852{
5853 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
5854 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
5855
5856 switch (pVCpu->iem.s.enmEffOpSize)
5857 {
5858 case IEMMODE_16BIT:
5859 IEM_MC_BEGIN(5, 1);
5860 IEM_MC_ARG(uint16_t, uSel, 0);
5861 IEM_MC_ARG(uint16_t, offSeg, 1);
5862 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5863 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5864 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5865 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5866 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5868 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5869 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
5870 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5871 IEM_MC_END();
5872 return VINF_SUCCESS;
5873
5874 case IEMMODE_32BIT:
5875 IEM_MC_BEGIN(5, 1);
5876 IEM_MC_ARG(uint16_t, uSel, 0);
5877 IEM_MC_ARG(uint32_t, offSeg, 1);
5878 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5879 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5880 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5881 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5882 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5884 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5885 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
5886 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5887 IEM_MC_END();
5888 return VINF_SUCCESS;
5889
5890 case IEMMODE_64BIT:
5891 IEM_MC_BEGIN(5, 1);
5892 IEM_MC_ARG(uint16_t, uSel, 0);
5893 IEM_MC_ARG(uint64_t, offSeg, 1);
5894 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5895 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5896 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5897 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5898 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5900 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
5901 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5902 else
5903 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5904 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
5905 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5906 IEM_MC_END();
5907 return VINF_SUCCESS;
5908
5909 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5910 }
5911}
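
/* Far pointer layout fetched above, using the 32-bit operand size as an
 * example: a 6-byte m16:32 operand with the offset in bytes 0..3 and the
 * selector in bytes 4..5, matching the U32 fetch at +0 and the U16 fetch
 * at displacement 4 (the selector follows at +2 / +8 for the 16-bit and
 * 64-bit variants).
 */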
5912
5913
5914/** Opcode 0x0f 0xb2. */
5915FNIEMOP_DEF(iemOp_lss_Gv_Mp)
5916{
5917 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
5918 IEMOP_HLP_MIN_386();
5919 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5920 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5921 return IEMOP_RAISE_INVALID_OPCODE();
5922 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
5923}
5924
5925
5926/** Opcode 0x0f 0xb3. */
5927FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5928{
5929 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
5930 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5931}
5932
5933
5934/** Opcode 0x0f 0xb4. */
5935FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
5936{
5937 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
5938 IEMOP_HLP_MIN_386();
5939 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5940 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5941 return IEMOP_RAISE_INVALID_OPCODE();
5942 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
5943}
5944
5945
5946/** Opcode 0x0f 0xb5. */
5947FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
5948{
5949 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
5950 IEMOP_HLP_MIN_386();
5951 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5952 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5953 return IEMOP_RAISE_INVALID_OPCODE();
5954 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
5955}
5956
5957
5958/** Opcode 0x0f 0xb6. */
5959FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
5960{
5961 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
5962 IEMOP_HLP_MIN_386();
5963
5964 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5965
5966 /*
5967 * If rm is denoting a register, no more instruction bytes.
5968 */
5969 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5970 {
5971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5972 switch (pVCpu->iem.s.enmEffOpSize)
5973 {
5974 case IEMMODE_16BIT:
5975 IEM_MC_BEGIN(0, 1);
5976 IEM_MC_LOCAL(uint16_t, u16Value);
5977 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5978 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
5979 IEM_MC_ADVANCE_RIP();
5980 IEM_MC_END();
5981 return VINF_SUCCESS;
5982
5983 case IEMMODE_32BIT:
5984 IEM_MC_BEGIN(0, 1);
5985 IEM_MC_LOCAL(uint32_t, u32Value);
5986 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5987 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
5988 IEM_MC_ADVANCE_RIP();
5989 IEM_MC_END();
5990 return VINF_SUCCESS;
5991
5992 case IEMMODE_64BIT:
5993 IEM_MC_BEGIN(0, 1);
5994 IEM_MC_LOCAL(uint64_t, u64Value);
5995 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5996 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
5997 IEM_MC_ADVANCE_RIP();
5998 IEM_MC_END();
5999 return VINF_SUCCESS;
6000
6001 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6002 }
6003 }
6004 else
6005 {
6006 /*
6007 * We're loading a register from memory.
6008 */
6009 switch (pVCpu->iem.s.enmEffOpSize)
6010 {
6011 case IEMMODE_16BIT:
6012 IEM_MC_BEGIN(0, 2);
6013 IEM_MC_LOCAL(uint16_t, u16Value);
6014 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6015 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6017 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6018 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6019 IEM_MC_ADVANCE_RIP();
6020 IEM_MC_END();
6021 return VINF_SUCCESS;
6022
6023 case IEMMODE_32BIT:
6024 IEM_MC_BEGIN(0, 2);
6025 IEM_MC_LOCAL(uint32_t, u32Value);
6026 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6027 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6029 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6030 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6031 IEM_MC_ADVANCE_RIP();
6032 IEM_MC_END();
6033 return VINF_SUCCESS;
6034
6035 case IEMMODE_64BIT:
6036 IEM_MC_BEGIN(0, 2);
6037 IEM_MC_LOCAL(uint64_t, u64Value);
6038 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6039 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6041 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6042 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6043 IEM_MC_ADVANCE_RIP();
6044 IEM_MC_END();
6045 return VINF_SUCCESS;
6046
6047 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6048 }
6049 }
6050}
6051
6052
6053/** Opcode 0x0f 0xb7. */
6054FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6055{
6056 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6057 IEMOP_HLP_MIN_386();
6058
6059 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6060
6061 /** @todo Not entirely sure how the operand size prefix is handled here,
6062 * assuming that it will be ignored. Would be nice to have a few
6063 * tests for this. */
6064 /*
6065 * If rm is denoting a register, no more instruction bytes.
6066 */
6067 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6068 {
6069 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6070 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6071 {
6072 IEM_MC_BEGIN(0, 1);
6073 IEM_MC_LOCAL(uint32_t, u32Value);
6074 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6075 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6076 IEM_MC_ADVANCE_RIP();
6077 IEM_MC_END();
6078 }
6079 else
6080 {
6081 IEM_MC_BEGIN(0, 1);
6082 IEM_MC_LOCAL(uint64_t, u64Value);
6083 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6084 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6085 IEM_MC_ADVANCE_RIP();
6086 IEM_MC_END();
6087 }
6088 }
6089 else
6090 {
6091 /*
6092 * We're loading a register from memory.
6093 */
6094 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6095 {
6096 IEM_MC_BEGIN(0, 2);
6097 IEM_MC_LOCAL(uint32_t, u32Value);
6098 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6099 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6100 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6101 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6102 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6103 IEM_MC_ADVANCE_RIP();
6104 IEM_MC_END();
6105 }
6106 else
6107 {
6108 IEM_MC_BEGIN(0, 2);
6109 IEM_MC_LOCAL(uint64_t, u64Value);
6110 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6111 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6113 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6114 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6115 IEM_MC_ADVANCE_RIP();
6116 IEM_MC_END();
6117 }
6118 }
6119 return VINF_SUCCESS;
6120}
6121
6122
6123/** Opcode 0x0f 0xb8. */
6124FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
6125
6126
6127/** Opcode 0x0f 0xb9. */
6128FNIEMOP_DEF(iemOp_Grp10)
6129{
6130 Log(("iemOp_Grp10 -> #UD\n"));
6131 return IEMOP_RAISE_INVALID_OPCODE();
6132}
6133
6134
6135/** Opcode 0x0f 0xba. */
6136FNIEMOP_DEF(iemOp_Grp8)
6137{
6138 IEMOP_HLP_MIN_386();
6139 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6140 PCIEMOPBINSIZES pImpl;
6141 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6142 {
6143 case 0: case 1: case 2: case 3:
6144 return IEMOP_RAISE_INVALID_OPCODE();
6145 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6146 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6147 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6148 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6149 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6150 }
6151 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6152
6153 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6154 {
6155 /* register destination. */
6156 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6158
6159 switch (pVCpu->iem.s.enmEffOpSize)
6160 {
6161 case IEMMODE_16BIT:
6162 IEM_MC_BEGIN(3, 0);
6163 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6164 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6165 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6166
6167 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6168 IEM_MC_REF_EFLAGS(pEFlags);
6169 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6170
6171 IEM_MC_ADVANCE_RIP();
6172 IEM_MC_END();
6173 return VINF_SUCCESS;
6174
6175 case IEMMODE_32BIT:
6176 IEM_MC_BEGIN(3, 0);
6177 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6178 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6179 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6180
6181 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6182 IEM_MC_REF_EFLAGS(pEFlags);
6183 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6184
6185 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6186 IEM_MC_ADVANCE_RIP();
6187 IEM_MC_END();
6188 return VINF_SUCCESS;
6189
6190 case IEMMODE_64BIT:
6191 IEM_MC_BEGIN(3, 0);
6192 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6193 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6194 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6195
6196 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6197 IEM_MC_REF_EFLAGS(pEFlags);
6198 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6199
6200 IEM_MC_ADVANCE_RIP();
6201 IEM_MC_END();
6202 return VINF_SUCCESS;
6203
6204 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6205 }
6206 }
6207 else
6208 {
6209 /* memory destination. */
6210
6211 uint32_t fAccess;
6212 if (pImpl->pfnLockedU16)
6213 fAccess = IEM_ACCESS_DATA_RW;
6214 else /* BT */
6215 fAccess = IEM_ACCESS_DATA_R;
6216
6217 /** @todo test negative bit offsets! */
6218 switch (pVCpu->iem.s.enmEffOpSize)
6219 {
6220 case IEMMODE_16BIT:
6221 IEM_MC_BEGIN(3, 1);
6222 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6223 IEM_MC_ARG(uint16_t, u16Src, 1);
6224 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6225 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6226
6227 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6228 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6229 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6230 if (pImpl->pfnLockedU16)
6231 IEMOP_HLP_DONE_DECODING();
6232 else
6233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6234 IEM_MC_FETCH_EFLAGS(EFlags);
6235 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6236 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6237 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6238 else
6239 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6240 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6241
6242 IEM_MC_COMMIT_EFLAGS(EFlags);
6243 IEM_MC_ADVANCE_RIP();
6244 IEM_MC_END();
6245 return VINF_SUCCESS;
6246
6247 case IEMMODE_32BIT:
6248 IEM_MC_BEGIN(3, 1);
6249 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6250 IEM_MC_ARG(uint32_t, u32Src, 1);
6251 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6252 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6253
6254 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6255 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6256 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6257 if (pImpl->pfnLockedU16)
6258 IEMOP_HLP_DONE_DECODING();
6259 else
6260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6261 IEM_MC_FETCH_EFLAGS(EFlags);
6262 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6263 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6264 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6265 else
6266 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6267 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6268
6269 IEM_MC_COMMIT_EFLAGS(EFlags);
6270 IEM_MC_ADVANCE_RIP();
6271 IEM_MC_END();
6272 return VINF_SUCCESS;
6273
6274 case IEMMODE_64BIT:
6275 IEM_MC_BEGIN(3, 1);
6276 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6277 IEM_MC_ARG(uint64_t, u64Src, 1);
6278 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6279 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6280
6281 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6282 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6283 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6284 if (pImpl->pfnLockedU16)
6285 IEMOP_HLP_DONE_DECODING();
6286 else
6287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6288 IEM_MC_FETCH_EFLAGS(EFlags);
6289 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6290 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6291 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6292 else
6293 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6294 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6295
6296 IEM_MC_COMMIT_EFLAGS(EFlags);
6297 IEM_MC_ADVANCE_RIP();
6298 IEM_MC_END();
6299 return VINF_SUCCESS;
6300
6301 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6302 }
6303 }
6304
6305}
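
/* Worked example of the immediate masking above: with 16-bit operand size,
 * 'bt word [mem], 21' tests bit 21 & 0x0f = bit 5 of the addressed word.
 * Unlike the BT Ev,Gv forms, the immediate forms never address bits
 * outside the operand, so mapping just the word/dword/qword suffices.
 */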
6306
6307
6308/** Opcode 0x0f 0xbb. */
6309FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6310{
6311 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
6312 IEMOP_HLP_MIN_386();
6313 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6314}
6315
6316
6317/** Opcode 0x0f 0xbc. */
6318FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6319{
6320 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
6321 IEMOP_HLP_MIN_386();
6322 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6323 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6324}
6325
6326
6327/** Opcode 0x0f 0xbd. */
6328FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6329{
6330 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
6331 IEMOP_HLP_MIN_386();
6332 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6333 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6334}
6335
6336
6337/** Opcode 0x0f 0xbe. */
6338FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
6339{
6340 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
6341 IEMOP_HLP_MIN_386();
6342
6343 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6344
6345 /*
6346 * If rm is denoting a register, no more instruction bytes.
6347 */
6348 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6349 {
6350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6351 switch (pVCpu->iem.s.enmEffOpSize)
6352 {
6353 case IEMMODE_16BIT:
6354 IEM_MC_BEGIN(0, 1);
6355 IEM_MC_LOCAL(uint16_t, u16Value);
6356 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6357 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6358 IEM_MC_ADVANCE_RIP();
6359 IEM_MC_END();
6360 return VINF_SUCCESS;
6361
6362 case IEMMODE_32BIT:
6363 IEM_MC_BEGIN(0, 1);
6364 IEM_MC_LOCAL(uint32_t, u32Value);
6365 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6366 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6367 IEM_MC_ADVANCE_RIP();
6368 IEM_MC_END();
6369 return VINF_SUCCESS;
6370
6371 case IEMMODE_64BIT:
6372 IEM_MC_BEGIN(0, 1);
6373 IEM_MC_LOCAL(uint64_t, u64Value);
6374 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6375 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6376 IEM_MC_ADVANCE_RIP();
6377 IEM_MC_END();
6378 return VINF_SUCCESS;
6379
6380 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6381 }
6382 }
6383 else
6384 {
6385 /*
6386 * We're loading a register from memory.
6387 */
6388 switch (pVCpu->iem.s.enmEffOpSize)
6389 {
6390 case IEMMODE_16BIT:
6391 IEM_MC_BEGIN(0, 2);
6392 IEM_MC_LOCAL(uint16_t, u16Value);
6393 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6394 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6396 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6397 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6398 IEM_MC_ADVANCE_RIP();
6399 IEM_MC_END();
6400 return VINF_SUCCESS;
6401
6402 case IEMMODE_32BIT:
6403 IEM_MC_BEGIN(0, 2);
6404 IEM_MC_LOCAL(uint32_t, u32Value);
6405 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6406 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6408 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6409 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6410 IEM_MC_ADVANCE_RIP();
6411 IEM_MC_END();
6412 return VINF_SUCCESS;
6413
6414 case IEMMODE_64BIT:
6415 IEM_MC_BEGIN(0, 2);
6416 IEM_MC_LOCAL(uint64_t, u64Value);
6417 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6418 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6420 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6421 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6422 IEM_MC_ADVANCE_RIP();
6423 IEM_MC_END();
6424 return VINF_SUCCESS;
6425
6426 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6427 }
6428 }
6429}
6430
6431
6432/** Opcode 0x0f 0xbf. */
6433FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
6434{
6435 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
6436 IEMOP_HLP_MIN_386();
6437
6438 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6439
6440 /** @todo Not entirely sure how the operand size prefix is handled here,
6441 * assuming that it will be ignored. Would be nice to have a few
6442 * tests for this. */
6443 /*
6444 * If rm is denoting a register, no more instruction bytes.
6445 */
6446 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6447 {
6448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6449 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6450 {
6451 IEM_MC_BEGIN(0, 1);
6452 IEM_MC_LOCAL(uint32_t, u32Value);
6453 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6454 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6455 IEM_MC_ADVANCE_RIP();
6456 IEM_MC_END();
6457 }
6458 else
6459 {
6460 IEM_MC_BEGIN(0, 1);
6461 IEM_MC_LOCAL(uint64_t, u64Value);
6462 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6463 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6464 IEM_MC_ADVANCE_RIP();
6465 IEM_MC_END();
6466 }
6467 }
6468 else
6469 {
6470 /*
6471 * We're loading a register from memory.
6472 */
6473 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6474 {
6475 IEM_MC_BEGIN(0, 2);
6476 IEM_MC_LOCAL(uint32_t, u32Value);
6477 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6478 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6480 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6481 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6482 IEM_MC_ADVANCE_RIP();
6483 IEM_MC_END();
6484 }
6485 else
6486 {
6487 IEM_MC_BEGIN(0, 2);
6488 IEM_MC_LOCAL(uint64_t, u64Value);
6489 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6490 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6491 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6492 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6493 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6494 IEM_MC_ADVANCE_RIP();
6495 IEM_MC_END();
6496 }
6497 }
6498 return VINF_SUCCESS;
6499}
6500
6501
6502/** Opcode 0x0f 0xc0. */
6503FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
6504{
6505 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6506 IEMOP_HLP_MIN_486();
6507 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
6508
6509 /*
6510 * If rm is denoting a register, no more instruction bytes.
6511 */
6512 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6513 {
6514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6515
6516 IEM_MC_BEGIN(3, 0);
6517 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6518 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6519 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6520
6521 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6522 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6523 IEM_MC_REF_EFLAGS(pEFlags);
6524 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6525
6526 IEM_MC_ADVANCE_RIP();
6527 IEM_MC_END();
6528 }
6529 else
6530 {
6531 /*
6532 * We're accessing memory.
6533 */
6534 IEM_MC_BEGIN(3, 3);
6535 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6536 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6537 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6538 IEM_MC_LOCAL(uint8_t, u8RegCopy);
6539 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6540
6541 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6542 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6543 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6544 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
6545 IEM_MC_FETCH_EFLAGS(EFlags);
6546 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6547 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6548 else
6549 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
6550
6551 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6552 IEM_MC_COMMIT_EFLAGS(EFlags);
6553 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
6554 IEM_MC_ADVANCE_RIP();
6555 IEM_MC_END();
6557 }
6558 return VINF_SUCCESS;
6559}
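
/* XADD dst, reg in pseudo code:
 *     temp = dst; dst += reg; reg = temp;
 * In the memory variant the register is fetched into u8RegCopy, the
 * helper leaves the old destination value in it, and it is committed back
 * to the register once the memory operand has been unmapped.
 */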
6560
6561
6562/** Opcode 0x0f 0xc1. */
6563FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
6564{
6565 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
6566 IEMOP_HLP_MIN_486();
6567 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6568
6569 /*
6570 * If rm is denoting a register, no more instruction bytes.
6571 */
6572 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6573 {
6574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6575
6576 switch (pVCpu->iem.s.enmEffOpSize)
6577 {
6578 case IEMMODE_16BIT:
6579 IEM_MC_BEGIN(3, 0);
6580 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6581 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6582 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6583
6584 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6585 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6586 IEM_MC_REF_EFLAGS(pEFlags);
6587 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6588
6589 IEM_MC_ADVANCE_RIP();
6590 IEM_MC_END();
6591 return VINF_SUCCESS;
6592
6593 case IEMMODE_32BIT:
6594 IEM_MC_BEGIN(3, 0);
6595 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6596 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6597 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6598
6599 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6600 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6601 IEM_MC_REF_EFLAGS(pEFlags);
6602 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6603
6604 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6605 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
6606 IEM_MC_ADVANCE_RIP();
6607 IEM_MC_END();
6608 return VINF_SUCCESS;
6609
6610 case IEMMODE_64BIT:
6611 IEM_MC_BEGIN(3, 0);
6612 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6613 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6614 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6615
6616 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6617 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6618 IEM_MC_REF_EFLAGS(pEFlags);
6619 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6620
6621 IEM_MC_ADVANCE_RIP();
6622 IEM_MC_END();
6623 return VINF_SUCCESS;
6624
6625 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6626 }
6627 }
6628 else
6629 {
6630 /*
6631 * We're accessing memory.
6632 */
6633 switch (pVCpu->iem.s.enmEffOpSize)
6634 {
6635 case IEMMODE_16BIT:
6636 IEM_MC_BEGIN(3, 3);
6637 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6638 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6639 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6640 IEM_MC_LOCAL(uint16_t, u16RegCopy);
6641 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6642
6643 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6644 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6645 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6646 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
6647 IEM_MC_FETCH_EFLAGS(EFlags);
6648 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6649 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6650 else
6651 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
6652
6653 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6654 IEM_MC_COMMIT_EFLAGS(EFlags);
6655 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
6656 IEM_MC_ADVANCE_RIP();
6657 IEM_MC_END();
6658 return VINF_SUCCESS;
6659
6660 case IEMMODE_32BIT:
6661 IEM_MC_BEGIN(3, 3);
6662 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6663 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6664 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6665 IEM_MC_LOCAL(uint32_t, u32RegCopy);
6666 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6667
6668 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6669 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6670 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6671 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
6672 IEM_MC_FETCH_EFLAGS(EFlags);
6673 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6674 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6675 else
6676 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
6677
6678 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6679 IEM_MC_COMMIT_EFLAGS(EFlags);
6680 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
6681 IEM_MC_ADVANCE_RIP();
6682 IEM_MC_END();
6683 return VINF_SUCCESS;
6684
6685 case IEMMODE_64BIT:
6686 IEM_MC_BEGIN(3, 3);
6687 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6688 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6689 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6690 IEM_MC_LOCAL(uint64_t, u64RegCopy);
6691 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6692
6693 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6694 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6695 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6696 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
6697 IEM_MC_FETCH_EFLAGS(EFlags);
6698 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6699 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6700 else
6701 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
6702
6703 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6704 IEM_MC_COMMIT_EFLAGS(EFlags);
6705 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
6706 IEM_MC_ADVANCE_RIP();
6707 IEM_MC_END();
6708 return VINF_SUCCESS;
6709
6710 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6711 }
6712 }
6713}
6714
6715/** Opcode 0x0f 0xc2. */
6716FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);
6717
6718
6719/** Opcode 0x0f 0xc3. */
6720FNIEMOP_DEF(iemOp_movnti_My_Gy)
6721{
6722 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
6723
6724 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6725
6726 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
6727 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
6728 {
6729 switch (pVCpu->iem.s.enmEffOpSize)
6730 {
6731 case IEMMODE_32BIT:
6732 IEM_MC_BEGIN(0, 2);
6733 IEM_MC_LOCAL(uint32_t, u32Value);
6734 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6735
6736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6738 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6739 return IEMOP_RAISE_INVALID_OPCODE();
6740
6741 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6742 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
6743 IEM_MC_ADVANCE_RIP();
6744 IEM_MC_END();
6745 break;
6746
6747 case IEMMODE_64BIT:
6748 IEM_MC_BEGIN(0, 2);
6749 IEM_MC_LOCAL(uint64_t, u64Value);
6750 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6751
6752 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6753 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6754 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6755 return IEMOP_RAISE_INVALID_OPCODE();
6756
6757 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6758 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
6759 IEM_MC_ADVANCE_RIP();
6760 IEM_MC_END();
6761 break;
6762
6763 case IEMMODE_16BIT:
6764 /** @todo check this form. */
6765 return IEMOP_RAISE_INVALID_OPCODE();
6766 }
6767 }
6768 else
6769 return IEMOP_RAISE_INVALID_OPCODE();
6770 return VINF_SUCCESS;
6771}
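
/* MOVNTI is only a non-temporal store *hint*; such hints may be ignored
 * architecturally, so implementing it as a plain U32/U64 store like above
 * is correct, just without the cache-bypassing optimization.
 */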
6772
6773
6774/** Opcode 0x0f 0xc4. */
6775FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);
6776
6777/** Opcode 0x0f 0xc5. */
6778FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);
6779
6780/** Opcode 0x0f 0xc6. */
6781FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
6782
6783
6784/** Opcode 0x0f 0xc7 !11/1. */
6785FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
6786{
6787 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
6788
6789 IEM_MC_BEGIN(4, 3);
6790 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
6791 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
6792 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
6793 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
6794 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
6795 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
6796 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6797
6798 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6799 IEMOP_HLP_DONE_DECODING();
6800 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6801
6802 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
6803 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
6804 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
6805
6806 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
6807 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
6808 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
6809
6810 IEM_MC_FETCH_EFLAGS(EFlags);
6811 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6812 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
6813 else
6814 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
6815
6816 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
6817 IEM_MC_COMMIT_EFLAGS(EFlags);
6818 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
6819 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
6820 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
6821 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
6822 IEM_MC_ENDIF();
6823 IEM_MC_ADVANCE_RIP();
6824
6825 IEM_MC_END();
6826 return VINF_SUCCESS;
6827}
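
/* CMPXCHG8B m64 in pseudo code:
 *     if (EDX:EAX == [mem]) { ZF = 1; [mem] = ECX:EBX; }
 *     else                  { ZF = 0; EDX:EAX = [mem]; }
 * On success the EDX:EAX locals are left untouched by the helper, so the
 * IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) block above only needs to commit
 * the registers on the failure path.
 */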
6828
6829
6830/** Opcode REX.W 0x0f 0xc7 !11/1. */
6831FNIEMOP_UD_STUB_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm);
6832
6833/** Opcode 0x0f 0xc7 11/6. */
6834FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
6835
6836/** Opcode 0x0f 0xc7 !11/6. */
6837FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
6838
6839/** Opcode 0x66 0x0f 0xc7 !11/6. */
6840FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
6841
6842/** Opcode 0xf3 0x0f 0xc7 !11/6. */
6843FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
6844
6845/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
6846FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6847
6848
6849/** Opcode 0x0f 0xc7. */
6850FNIEMOP_DEF(iemOp_Grp9)
6851{
6852 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6853 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6854 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6855 {
6856 case 0: case 2: case 3: case 4: case 5:
6857 return IEMOP_RAISE_INVALID_OPCODE();
6858 case 1:
6859 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6860 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6861 || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6862 return IEMOP_RAISE_INVALID_OPCODE();
5863 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6864 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6865 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6866 case 6:
6867 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6868 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6869 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6870 {
6871 case 0:
6872 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6873 case IEM_OP_PRF_SIZE_OP:
6874 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6875 case IEM_OP_PRF_REPZ:
6876 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6877 default:
6878 return IEMOP_RAISE_INVALID_OPCODE();
6879 }
6880 case 7:
6881 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6882 {
6883 case 0:
6884 case IEM_OP_PRF_REPZ:
6885 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6886 default:
6887 return IEMOP_RAISE_INVALID_OPCODE();
6888 }
6889 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6890 }
6891}
6892
6893
6894/**
6895 * Common 'bswap register' helper.
6896 */
6897FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
6898{
6899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6900 switch (pVCpu->iem.s.enmEffOpSize)
6901 {
6902 case IEMMODE_16BIT:
6903 IEM_MC_BEGIN(1, 0);
6904 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6905 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
6906 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
6907 IEM_MC_ADVANCE_RIP();
6908 IEM_MC_END();
6909 return VINF_SUCCESS;
6910
6911 case IEMMODE_32BIT:
6912 IEM_MC_BEGIN(1, 0);
6913 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6914 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
6915 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6916 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
6917 IEM_MC_ADVANCE_RIP();
6918 IEM_MC_END();
6919 return VINF_SUCCESS;
6920
6921 case IEMMODE_64BIT:
6922 IEM_MC_BEGIN(1, 0);
6923 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6924 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
6925 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
6926 IEM_MC_ADVANCE_RIP();
6927 IEM_MC_END();
6928 return VINF_SUCCESS;
6929
6930 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6931 }
6932}
6933
6934
6935/** Opcode 0x0f 0xc8. */
6936FNIEMOP_DEF(iemOp_bswap_rAX_r8)
6937{
6938 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
6939 /* Note! The Intel manuals state that R8-R15 can be accessed by using a
6940 REX.X prefix, but REX.B appears to be the correct one. For a
6941 parallel case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
6942 IEMOP_HLP_MIN_486();
6943 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
6944}
6945
6946
6947/** Opcode 0x0f 0xc9. */
6948FNIEMOP_DEF(iemOp_bswap_rCX_r9)
6949{
6950 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
6951 IEMOP_HLP_MIN_486();
6952 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
6953}
6954
6955
6956/** Opcode 0x0f 0xca. */
6957FNIEMOP_DEF(iemOp_bswap_rDX_r10)
6958{
6959 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
6960 IEMOP_HLP_MIN_486();
6961 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
6962}
6963
6964
6965/** Opcode 0x0f 0xcb. */
6966FNIEMOP_DEF(iemOp_bswap_rBX_r11)
6967{
6968 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
6969 IEMOP_HLP_MIN_486();
6970 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
6971}
6972
6973
6974/** Opcode 0x0f 0xcc. */
6975FNIEMOP_DEF(iemOp_bswap_rSP_r12)
6976{
6977 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
6978 IEMOP_HLP_MIN_486();
6979 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
6980}
6981
6982
6983/** Opcode 0x0f 0xcd. */
6984FNIEMOP_DEF(iemOp_bswap_rBP_r13)
6985{
6986 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
6987 IEMOP_HLP_MIN_486();
6988 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
6989}
6990
6991
6992/** Opcode 0x0f 0xce. */
6993FNIEMOP_DEF(iemOp_bswap_rSI_r14)
6994{
6995 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
6996 IEMOP_HLP_MIN_486();
6997 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
6998}
6999
7000
7001/** Opcode 0x0f 0xcf. */
7002FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7003{
7004 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7005 IEMOP_HLP_MIN_486();
7006 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7007}
7008
7009
7010
7011/** Opcode 0x0f 0xd0. */
7012FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
7013/** Opcode 0x0f 0xd1. */
7014FNIEMOP_STUB(iemOp_psrlw_Pq_Qq__psrlw_Vdq_Wdq);
7015/** Opcode 0x0f 0xd2. */
7016FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
7017/** Opcode 0x0f 0xd3. */
7018FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
7019/** Opcode 0x0f 0xd4. */
7020FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
7021/** Opcode 0x0f 0xd5. */
7022FNIEMOP_STUB(iemOp_pmullw_Pq_Qq__pmullw_Vdq_Wdq);
7023/** Opcode 0x0f 0xd6. */
7024FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq); /** @todo Win10 w/o np may need this: 66 0f d6 0a */
7025
7026
7027/** Opcode 0x0f 0xd7. */
7028FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
7029{
7030 /* Docs say register only. */
7031 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7032 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7033 return IEMOP_RAISE_INVALID_OPCODE();
7034
7035 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7036 /** @todo testcase: Check that the instruction implicitly clears the high
7037 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
7038 * and opcode modifications are made to work with the whole width (not
7039 * just 128). */
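    /* pmovmskb gathers the most significant bit of each source byte into the
       low bits of the destination GPR: 8 mask bits for MMX, 16 for SSE. */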
7040 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7041 {
7042 case IEM_OP_PRF_SIZE_OP: /* SSE */
7043 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7044 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7045 IEM_MC_BEGIN(2, 0);
7046 IEM_MC_ARG(uint64_t *, pDst, 0);
7047 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7048 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7049 IEM_MC_PREPARE_SSE_USAGE();
7050 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7051 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7052 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7053 IEM_MC_ADVANCE_RIP();
7054 IEM_MC_END();
7055 return VINF_SUCCESS;
7056
7057 case 0: /* MMX */
7058 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7059 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7060 IEM_MC_BEGIN(2, 0);
7061 IEM_MC_ARG(uint64_t *, pDst, 0);
7062 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7063 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7064 IEM_MC_PREPARE_FPU_USAGE();
7065 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7066 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7067 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7068 IEM_MC_ADVANCE_RIP();
7069 IEM_MC_END();
7070 return VINF_SUCCESS;
7071
7072 default:
7073 return IEMOP_RAISE_INVALID_OPCODE();
7074 }
7075}
7076
7077
7078/** Opcode 0x0f 0xd8. */
7079FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
7080/** Opcode 0x0f 0xd9. */
7081FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
7082/** Opcode 0x0f 0xda. */
7083FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
7084/** Opcode 0x0f 0xdb. */
7085FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
7086/** Opcode 0x0f 0xdc. */
7087FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
7088/** Opcode 0x0f 0xdd. */
7089FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
7090/** Opcode 0x0f 0xde. */
7091FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pmaxub_Vdq_Wdq);
7092/** Opcode 0x0f 0xdf. */
7093FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
7094/** Opcode 0x0f 0xe0. */
7095FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
7096/** Opcode 0x0f 0xe1. */
7097FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
7098/** Opcode 0x0f 0xe2. */
7099FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
7100/** Opcode 0x0f 0xe3. */
7101FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
7102/** Opcode 0x0f 0xe4. */
7103FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
7104/** Opcode 0x0f 0xe5. */
7105FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
7106/** Opcode 0x0f 0xe6. */
7107FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wpd__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
7108
7109
7110/** Opcode 0x0f 0xe7. */
7111FNIEMOP_DEF(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq)
7112{
7113 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7114 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7115 {
7116 /*
7117 * Register, memory.
7118 */
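        /* Both forms are non-temporal stores: the source register is written
           to memory with a hint to bypass the cache hierarchy. */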
7119/** @todo check when the REPNZ/Z bits kick in. Same as lock, probably... */
7120 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7121 {
7122
7123 case IEM_OP_PRF_SIZE_OP: /* SSE */
7124 IEMOP_MNEMONIC(movntdq_Mdq_Vdq, "movntdq Mdq,Vdq");
7125 IEM_MC_BEGIN(0, 2);
7126 IEM_MC_LOCAL(uint128_t, uSrc);
7127 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7128
7129 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7131 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7132 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7133
7134 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7135 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7136
7137 IEM_MC_ADVANCE_RIP();
7138 IEM_MC_END();
7139 break;
7140
7141 case 0: /* MMX */
7142 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7143 IEM_MC_BEGIN(0, 2);
7144 IEM_MC_LOCAL(uint64_t, uSrc);
7145 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7146
7147 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7149 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7150 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7151
7152 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7153 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7154
7155 IEM_MC_ADVANCE_RIP();
7156 IEM_MC_END();
7157 break;
7158
7159 default:
7160 return IEMOP_RAISE_INVALID_OPCODE();
7161 }
7162 }
7163 /* The register, register encoding is invalid. */
7164 else
7165 return IEMOP_RAISE_INVALID_OPCODE();
7166 return VINF_SUCCESS;
7167}
7168
7169
7170/** Opcode 0x0f 0xe8. */
7171FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
7172/** Opcode 0x0f 0xe9. */
7173FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
7174/** Opcode 0x0f 0xea. */
7175FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
7176/** Opcode 0x0f 0xeb. */
7177FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
7178/** Opcode 0x0f 0xec. */
7179FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
7180/** Opcode 0x0f 0xed. */
7181FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
7182/** Opcode 0x0f 0xee. */
7183FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
7184
7185
7186/** Opcode 0x0f 0xef. */
7187FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
7188{
7189 IEMOP_MNEMONIC(pxor, "pxor");
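    /* Both the MMX and the SSE2 form are handled by the common full-width
       binary-operation helper. */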
7190 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
7191}
7192
7193
7194/** Opcode 0x0f 0xf0. */
7195FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
7196/** Opcode 0x0f 0xf1. */
7197FNIEMOP_STUB(iemOp_psllw_Pq_Qq__psllw_Vdq_Wdq);
7198/** Opcode 0x0f 0xf2. */
7199FNIEMOP_STUB(iemOp_pslld_Pq_Qq__pslld_Vdq_Wdq);
7200/** Opcode 0x0f 0xf3. */
7201FNIEMOP_STUB(iemOp_psllq_Pq_Qq__psllq_Vdq_Wdq);
7202/** Opcode 0x0f 0xf4. */
7203FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
7204/** Opcode 0x0f 0xf5. */
7205FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
7206/** Opcode 0x0f 0xf6. */
7207FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
7208/** Opcode 0x0f 0xf7. */
7209FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
7210/** Opcode 0x0f 0xf8. */
7211FNIEMOP_STUB(iemOp_psubb_Pq_Qq__psubb_Vdq_Wdq); //NEXT
7212/** Opcode 0x0f 0xf9. */
7213FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
7214/** Opcode 0x0f 0xfa. */
7215FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
7216/** Opcode 0x0f 0xfb. */
7217FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psubq_Vdq_Wdq);
7218/** Opcode 0x0f 0xfc. */
7219FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
7220/** Opcode 0x0f 0xfd. */
7221FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
7222/** Opcode 0x0f 0xfe. */
7223FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
7224
7225
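/**
 * Dispatch table for the two-byte (0x0f) opcode map.
 */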
7226IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[256] =
7227{
7228 /* 0x00 */ iemOp_Grp6,
7229 /* 0x01 */ iemOp_Grp7,
7230 /* 0x02 */ iemOp_lar_Gv_Ew,
7231 /* 0x03 */ iemOp_lsl_Gv_Ew,
7232 /* 0x04 */ iemOp_Invalid,
7233 /* 0x05 */ iemOp_syscall,
7234 /* 0x06 */ iemOp_clts,
7235 /* 0x07 */ iemOp_sysret,
7236 /* 0x08 */ iemOp_invd,
7237 /* 0x09 */ iemOp_wbinvd,
7238 /* 0x0a */ iemOp_Invalid,
7239 /* 0x0b */ iemOp_ud2,
7240 /* 0x0c */ iemOp_Invalid,
7241 /* 0x0d */ iemOp_nop_Ev_GrpP,
7242 /* 0x0e */ iemOp_femms,
7243 /* 0x0f */ iemOp_3Dnow,
7244 /* 0x10 */ iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
7245 /* 0x11 */ iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
7246 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
7247 /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
7248 /* 0x14 */ iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
7249 /* 0x15 */ iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
7250 /* 0x16 */ iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
7251 /* 0x17 */ iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
7252 /* 0x18 */ iemOp_prefetch_Grp16,
7253 /* 0x19 */ iemOp_nop_Ev,
7254 /* 0x1a */ iemOp_nop_Ev,
7255 /* 0x1b */ iemOp_nop_Ev,
7256 /* 0x1c */ iemOp_nop_Ev,
7257 /* 0x1d */ iemOp_nop_Ev,
7258 /* 0x1e */ iemOp_nop_Ev,
7259 /* 0x1f */ iemOp_nop_Ev,
7260 /* 0x20 */ iemOp_mov_Rd_Cd,
7261 /* 0x21 */ iemOp_mov_Rd_Dd,
7262 /* 0x22 */ iemOp_mov_Cd_Rd,
7263 /* 0x23 */ iemOp_mov_Dd_Rd,
7264 /* 0x24 */ iemOp_mov_Rd_Td,
7265 /* 0x25 */ iemOp_Invalid,
7266 /* 0x26 */ iemOp_mov_Td_Rd,
7267 /* 0x27 */ iemOp_Invalid,
7268 /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
7269 /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
7270 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
7271 /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
7272 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
7273 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
7274 /* 0x2e */ iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
7275 /* 0x2f */ iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
7276 /* 0x30 */ iemOp_wrmsr,
7277 /* 0x31 */ iemOp_rdtsc,
7278 /* 0x32 */ iemOp_rdmsr,
7279 /* 0x33 */ iemOp_rdpmc,
7280 /* 0x34 */ iemOp_sysenter,
7281 /* 0x35 */ iemOp_sysexit,
7282 /* 0x36 */ iemOp_Invalid,
7283 /* 0x37 */ iemOp_getsec,
7284 /* 0x38 */ iemOp_3byte_Esc_A4,
7285 /* 0x39 */ iemOp_Invalid,
7286 /* 0x3a */ iemOp_3byte_Esc_A5,
7287 /* 0x3b */ iemOp_Invalid,
7288 /* 0x3c */ iemOp_Invalid,
7289 /* 0x3d */ iemOp_Invalid,
7290 /* 0x3e */ iemOp_Invalid,
7291 /* 0x3f */ iemOp_Invalid,
7292 /* 0x40 */ iemOp_cmovo_Gv_Ev,
7293 /* 0x41 */ iemOp_cmovno_Gv_Ev,
7294 /* 0x42 */ iemOp_cmovc_Gv_Ev,
7295 /* 0x43 */ iemOp_cmovnc_Gv_Ev,
7296 /* 0x44 */ iemOp_cmove_Gv_Ev,
7297 /* 0x45 */ iemOp_cmovne_Gv_Ev,
7298 /* 0x46 */ iemOp_cmovbe_Gv_Ev,
7299 /* 0x47 */ iemOp_cmovnbe_Gv_Ev,
7300 /* 0x48 */ iemOp_cmovs_Gv_Ev,
7301 /* 0x49 */ iemOp_cmovns_Gv_Ev,
7302 /* 0x4a */ iemOp_cmovp_Gv_Ev,
7303 /* 0x4b */ iemOp_cmovnp_Gv_Ev,
7304 /* 0x4c */ iemOp_cmovl_Gv_Ev,
7305 /* 0x4d */ iemOp_cmovnl_Gv_Ev,
7306 /* 0x4e */ iemOp_cmovle_Gv_Ev,
7307 /* 0x4f */ iemOp_cmovnle_Gv_Ev,
7308 /* 0x50 */ iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
7309 /* 0x51 */ iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
7310 /* 0x52 */ iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
7311 /* 0x53 */ iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
7312 /* 0x54 */ iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
7313 /* 0x55 */ iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
7314 /* 0x56 */ iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
7315 /* 0x57 */ iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
7316 /* 0x58 */ iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
7317 /* 0x59 */ iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
7318 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
7319 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
7320 /* 0x5c */ iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
7321 /* 0x5d */ iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
7322 /* 0x5e */ iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
7323 /* 0x5f */ iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
7324 /* 0x60 */ iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
7325 /* 0x61 */ iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
7326 /* 0x62 */ iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
7327 /* 0x63 */ iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
7328 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
7329 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
7330 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
7331 /* 0x67 */ iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
7332 /* 0x68 */ iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
7333 /* 0x69 */ iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
7334 /* 0x6a */ iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
7335 /* 0x6b */ iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
7336 /* 0x6c */ iemOp_punpcklqdq_Vdq_Wdq,
7337 /* 0x6d */ iemOp_punpckhqdq_Vdq_Wdq,
7338 /* 0x6e */ iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
7339 /* 0x6f */ iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
7340 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
7341 /* 0x71 */ iemOp_Grp12,
7342 /* 0x72 */ iemOp_Grp13,
7343 /* 0x73 */ iemOp_Grp14,
7344 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
7345 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
7346 /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
7347 /* 0x77 */ iemOp_emms,
7348 /* 0x78 */ iemOp_vmread_AmdGrp17,
7349 /* 0x79 */ iemOp_vmwrite,
7350 /* 0x7a */ iemOp_Invalid,
7351 /* 0x7b */ iemOp_Invalid,
7352 /* 0x7c */ iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
7353 /* 0x7d */ iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
7354 /* 0x7e */ iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
7355 /* 0x7f */ iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
7356 /* 0x80 */ iemOp_jo_Jv,
7357 /* 0x81 */ iemOp_jno_Jv,
7358 /* 0x82 */ iemOp_jc_Jv,
7359 /* 0x83 */ iemOp_jnc_Jv,
7360 /* 0x84 */ iemOp_je_Jv,
7361 /* 0x85 */ iemOp_jne_Jv,
7362 /* 0x86 */ iemOp_jbe_Jv,
7363 /* 0x87 */ iemOp_jnbe_Jv,
7364 /* 0x88 */ iemOp_js_Jv,
7365 /* 0x89 */ iemOp_jns_Jv,
7366 /* 0x8a */ iemOp_jp_Jv,
7367 /* 0x8b */ iemOp_jnp_Jv,
7368 /* 0x8c */ iemOp_jl_Jv,
7369 /* 0x8d */ iemOp_jnl_Jv,
7370 /* 0x8e */ iemOp_jle_Jv,
7371 /* 0x8f */ iemOp_jnle_Jv,
7372 /* 0x90 */ iemOp_seto_Eb,
7373 /* 0x91 */ iemOp_setno_Eb,
7374 /* 0x92 */ iemOp_setc_Eb,
7375 /* 0x93 */ iemOp_setnc_Eb,
7376 /* 0x94 */ iemOp_sete_Eb,
7377 /* 0x95 */ iemOp_setne_Eb,
7378 /* 0x96 */ iemOp_setbe_Eb,
7379 /* 0x97 */ iemOp_setnbe_Eb,
7380 /* 0x98 */ iemOp_sets_Eb,
7381 /* 0x99 */ iemOp_setns_Eb,
7382 /* 0x9a */ iemOp_setp_Eb,
7383 /* 0x9b */ iemOp_setnp_Eb,
7384 /* 0x9c */ iemOp_setl_Eb,
7385 /* 0x9d */ iemOp_setnl_Eb,
7386 /* 0x9e */ iemOp_setle_Eb,
7387 /* 0x9f */ iemOp_setnle_Eb,
7388 /* 0xa0 */ iemOp_push_fs,
7389 /* 0xa1 */ iemOp_pop_fs,
7390 /* 0xa2 */ iemOp_cpuid,
7391 /* 0xa3 */ iemOp_bt_Ev_Gv,
7392 /* 0xa4 */ iemOp_shld_Ev_Gv_Ib,
7393 /* 0xa5 */ iemOp_shld_Ev_Gv_CL,
7394 /* 0xa6 */ iemOp_Invalid,
7395 /* 0xa7 */ iemOp_Invalid,
7396 /* 0xa8 */ iemOp_push_gs,
7397 /* 0xa9 */ iemOp_pop_gs,
7398 /* 0xaa */ iemOp_rsm,
7399 /* 0xab */ iemOp_bts_Ev_Gv,
7400 /* 0xac */ iemOp_shrd_Ev_Gv_Ib,
7401 /* 0xad */ iemOp_shrd_Ev_Gv_CL,
7402 /* 0xae */ iemOp_Grp15,
7403 /* 0xaf */ iemOp_imul_Gv_Ev,
7404 /* 0xb0 */ iemOp_cmpxchg_Eb_Gb,
7405 /* 0xb1 */ iemOp_cmpxchg_Ev_Gv,
7406 /* 0xb2 */ iemOp_lss_Gv_Mp,
7407 /* 0xb3 */ iemOp_btr_Ev_Gv,
7408 /* 0xb4 */ iemOp_lfs_Gv_Mp,
7409 /* 0xb5 */ iemOp_lgs_Gv_Mp,
7410 /* 0xb6 */ iemOp_movzx_Gv_Eb,
7411 /* 0xb7 */ iemOp_movzx_Gv_Ew,
7412 /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,
7413 /* 0xb9 */ iemOp_Grp10,
7414 /* 0xba */ iemOp_Grp8,
7415 /* 0xbb */ iemOp_btc_Ev_Gv,
7416 /* 0xbc */ iemOp_bsf_Gv_Ev,
7417 /* 0xbd */ iemOp_bsr_Gv_Ev,
7418 /* 0xbe */ iemOp_movsx_Gv_Eb,
7419 /* 0xbf */ iemOp_movsx_Gv_Ew,
7420 /* 0xc0 */ iemOp_xadd_Eb_Gb,
7421 /* 0xc1 */ iemOp_xadd_Ev_Gv,
7422 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
7423 /* 0xc3 */ iemOp_movnti_My_Gy,
7424 /* 0xc4 */ iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
7425 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
7426 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
7427 /* 0xc7 */ iemOp_Grp9,
7428 /* 0xc8 */ iemOp_bswap_rAX_r8,
7429 /* 0xc9 */ iemOp_bswap_rCX_r9,
7430 /* 0xca */ iemOp_bswap_rDX_r10,
7431 /* 0xcb */ iemOp_bswap_rBX_r11,
7432 /* 0xcc */ iemOp_bswap_rSP_r12,
7433 /* 0xcd */ iemOp_bswap_rBP_r13,
7434 /* 0xce */ iemOp_bswap_rSI_r14,
7435 /* 0xcf */ iemOp_bswap_rDI_r15,
7436 /* 0xd0 */ iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
7437 /* 0xd1 */ iemOp_psrlw_Pq_Qq__psrlw_Vdq_Wdq,
7438 /* 0xd2 */ iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
7439 /* 0xd3 */ iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
7440 /* 0xd4 */ iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
7441 /* 0xd5 */ iemOp_pmullw_Pq_Qq__pmullw_Vdq_Wdq,
7442 /* 0xd6 */ iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
7443 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
7444 /* 0xd8 */ iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
7445 /* 0xd9 */ iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
7446 /* 0xda */ iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
7447 /* 0xdb */ iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
7448 /* 0xdc */ iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
7449 /* 0xdd */ iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
7450 /* 0xde */ iemOp_pmaxub_Pq_Qq__pmaxub_Vdq_Wdq,
7451 /* 0xdf */ iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
7452 /* 0xe0 */ iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
7453 /* 0xe1 */ iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
7454 /* 0xe2 */ iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
7455 /* 0xe3 */ iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
7456 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
7457 /* 0xe5 */ iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
7458 /* 0xe6 */ iemOp_cvttpd2dq_Vdq_Wpd__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
7459 /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
7460 /* 0xe8 */ iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
7461 /* 0xe9 */ iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
7462 /* 0xea */ iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
7463 /* 0xeb */ iemOp_por_Pq_Qq__por_Vdq_Wdq,
7464 /* 0xec */ iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
7465 /* 0xed */ iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
7466 /* 0xee */ iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
7467 /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
7468 /* 0xf0 */ iemOp_lddqu_Vdq_Mdq,
7469 /* 0xf1 */ iemOp_psllw_Pq_Qq__psllw_Vdq_Wdq,
7470 /* 0xf2 */ iemOp_pslld_Pq_Qq__pslld_Vdq_Wdq,
7471 /* 0xf3 */ iemOp_psllq_Pq_Qq__psllq_Vdq_Wdq,
7472 /* 0xf4 */ iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
7473 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
7474 /* 0xf6 */ iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
7475 /* 0xf7 */ iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
7476 /* 0xf8 */ iemOp_psubb_Pq_Qq__psubb_Vdq_Wdq,
7477 /* 0xf9 */ iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
7478 /* 0xfa */ iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
7479 /* 0xfb */ iemOp_psubq_Pq_Qq__psubq_Vdq_Wdq,
7480 /* 0xfc */ iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
7481 /* 0xfd */ iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
7482 /* 0xfe */ iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
7483 /* 0xff */ iemOp_Invalid
7484};
7485
7486/** @} */
7487
7488
7489/** @name One byte opcodes.
7490 *
7491 * @{
7492 */
7493
7494/** Opcode 0x00. */
7495FNIEMOP_DEF(iemOp_add_Eb_Gb)
7496{
7497 IEMOP_MNEMONIC(add_Eb_Gb, "add Eb,Gb");
7498 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
7499}
7500
7501
7502/** Opcode 0x01. */
7503FNIEMOP_DEF(iemOp_add_Ev_Gv)
7504{
7505 IEMOP_MNEMONIC(add_Ev_Gv, "add Ev,Gv");
7506 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
7507}
7508
7509
7510/** Opcode 0x02. */
7511FNIEMOP_DEF(iemOp_add_Gb_Eb)
7512{
7513 IEMOP_MNEMONIC(add_Gb_Eb, "add Gb,Eb");
7514 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
7515}
7516
7517
7518/** Opcode 0x03. */
7519FNIEMOP_DEF(iemOp_add_Gv_Ev)
7520{
7521 IEMOP_MNEMONIC(add_Gv_Ev, "add Gv,Ev");
7522 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
7523}
7524
7525
7526/** Opcode 0x04. */
7527FNIEMOP_DEF(iemOp_add_Al_Ib)
7528{
7529 IEMOP_MNEMONIC(add_al_Ib, "add al,Ib");
7530 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
7531}
7532
7533
7534/** Opcode 0x05. */
7535FNIEMOP_DEF(iemOp_add_eAX_Iz)
7536{
7537 IEMOP_MNEMONIC(add_rAX_Iz, "add rAX,Iz");
7538 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
7539}
7540
7541
7542/** Opcode 0x06. */
7543FNIEMOP_DEF(iemOp_push_ES)
7544{
7545 IEMOP_MNEMONIC(push_es, "push es");
7546 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
7547}
7548
7549
7550/** Opcode 0x07. */
7551FNIEMOP_DEF(iemOp_pop_ES)
7552{
7553 IEMOP_MNEMONIC(pop_es, "pop es");
7554 IEMOP_HLP_NO_64BIT();
7555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7556 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
7557}
7558
7559
7560/** Opcode 0x08. */
7561FNIEMOP_DEF(iemOp_or_Eb_Gb)
7562{
7563 IEMOP_MNEMONIC(or_Eb_Gb, "or Eb,Gb");
7564 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7565 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
7566}
7567
7568
7569/** Opcode 0x09. */
7570FNIEMOP_DEF(iemOp_or_Ev_Gv)
7571{
7572 IEMOP_MNEMONIC(or_Ev_Gv, "or Ev,Gv");
7573 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7574 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
7575}
7576
7577
7578/** Opcode 0x0a. */
7579FNIEMOP_DEF(iemOp_or_Gb_Eb)
7580{
7581 IEMOP_MNEMONIC(or_Gb_Eb, "or Gb,Eb");
7582 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7583 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
7584}
7585
7586
7587/** Opcode 0x0b. */
7588FNIEMOP_DEF(iemOp_or_Gv_Ev)
7589{
7590 IEMOP_MNEMONIC(or_Gv_Ev, "or Gv,Ev");
7591 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7592 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
7593}
7594
7595
7596/** Opcode 0x0c. */
7597FNIEMOP_DEF(iemOp_or_Al_Ib)
7598{
7599 IEMOP_MNEMONIC(or_al_Ib, "or al,Ib");
7600 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7601 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
7602}
7603
7604
7605/** Opcode 0x0d. */
7606FNIEMOP_DEF(iemOp_or_eAX_Iz)
7607{
7608 IEMOP_MNEMONIC(or_rAX_Iz, "or rAX,Iz");
7609 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7610 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
7611}
7612
7613
7614/** Opcode 0x0e. */
7615FNIEMOP_DEF(iemOp_push_CS)
7616{
7617 IEMOP_MNEMONIC(push_cs, "push cs");
7618 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
7619}
7620
7621
7622/** Opcode 0x0f. */
7623FNIEMOP_DEF(iemOp_2byteEscape)
7624{
7625 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7626 /** @todo PUSH CS on 8086, undefined on 80186. */
7627 IEMOP_HLP_MIN_286();
7628 return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
7629}
7630
7631/** Opcode 0x10. */
7632FNIEMOP_DEF(iemOp_adc_Eb_Gb)
7633{
7634 IEMOP_MNEMONIC(adc_Eb_Gb, "adc Eb,Gb");
7635 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
7636}
7637
7638
7639/** Opcode 0x11. */
7640FNIEMOP_DEF(iemOp_adc_Ev_Gv)
7641{
7642 IEMOP_MNEMONIC(adc_Ev_Gv, "adc Ev,Gv");
7643 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
7644}
7645
7646
7647/** Opcode 0x12. */
7648FNIEMOP_DEF(iemOp_adc_Gb_Eb)
7649{
7650 IEMOP_MNEMONIC(adc_Gb_Eb, "adc Gb,Eb");
7651 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
7652}
7653
7654
7655/** Opcode 0x13. */
7656FNIEMOP_DEF(iemOp_adc_Gv_Ev)
7657{
7658 IEMOP_MNEMONIC(adc_Gv_Ev, "adc Gv,Ev");
7659 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
7660}
7661
7662
7663/** Opcode 0x14. */
7664FNIEMOP_DEF(iemOp_adc_Al_Ib)
7665{
7666 IEMOP_MNEMONIC(adc_al_Ib, "adc al,Ib");
7667 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
7668}
7669
7670
7671/** Opcode 0x15. */
7672FNIEMOP_DEF(iemOp_adc_eAX_Iz)
7673{
7674 IEMOP_MNEMONIC(adc_rAX_Iz, "adc rAX,Iz");
7675 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
7676}
7677
7678
7679/** Opcode 0x16. */
7680FNIEMOP_DEF(iemOp_push_SS)
7681{
7682 IEMOP_MNEMONIC(push_ss, "push ss");
7683 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
7684}
7685
7686
7687/** Opcode 0x17. */
7688FNIEMOP_DEF(iemOp_pop_SS)
7689{
7690 IEMOP_MNEMONIC(pop_ss, "pop ss"); /** @todo implies instruction fusing? */
7691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7692 IEMOP_HLP_NO_64BIT();
7693 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
7694}
7695
7696
7697/** Opcode 0x18. */
7698FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
7699{
7700 IEMOP_MNEMONIC(sbb_Eb_Gb, "sbb Eb,Gb");
7701 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
7702}
7703
7704
7705/** Opcode 0x19. */
7706FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
7707{
7708 IEMOP_MNEMONIC(sbb_Ev_Gv, "sbb Ev,Gv");
7709 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
7710}
7711
7712
7713/** Opcode 0x1a. */
7714FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
7715{
7716 IEMOP_MNEMONIC(sbb_Gb_Eb, "sbb Gb,Eb");
7717 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
7718}
7719
7720
7721/** Opcode 0x1b. */
7722FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
7723{
7724 IEMOP_MNEMONIC(sbb_Gv_Ev, "sbb Gv,Ev");
7725 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
7726}
7727
7728
7729/** Opcode 0x1c. */
7730FNIEMOP_DEF(iemOp_sbb_Al_Ib)
7731{
7732 IEMOP_MNEMONIC(sbb_al_Ib, "sbb al,Ib");
7733 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
7734}
7735
7736
7737/** Opcode 0x1d. */
7738FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
7739{
7740 IEMOP_MNEMONIC(sbb_rAX_Iz, "sbb rAX,Iz");
7741 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
7742}
7743
7744
7745/** Opcode 0x1e. */
7746FNIEMOP_DEF(iemOp_push_DS)
7747{
7748 IEMOP_MNEMONIC(push_ds, "push ds");
7749 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
7750}
7751
7752
7753/** Opcode 0x1f. */
7754FNIEMOP_DEF(iemOp_pop_DS)
7755{
7756 IEMOP_MNEMONIC(pop_ds, "pop ds");
7757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7758 IEMOP_HLP_NO_64BIT();
7759 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
7760}
7761
7762
7763/** Opcode 0x20. */
7764FNIEMOP_DEF(iemOp_and_Eb_Gb)
7765{
7766 IEMOP_MNEMONIC(and_Eb_Gb, "and Eb,Gb");
7767 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7768 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
7769}
7770
7771
7772/** Opcode 0x21. */
7773FNIEMOP_DEF(iemOp_and_Ev_Gv)
7774{
7775 IEMOP_MNEMONIC(and_Ev_Gv, "and Ev,Gv");
7776 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7777 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
7778}
7779
7780
7781/** Opcode 0x22. */
7782FNIEMOP_DEF(iemOp_and_Gb_Eb)
7783{
7784 IEMOP_MNEMONIC(and_Gb_Eb, "and Gb,Eb");
7785 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7786 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
7787}
7788
7789
7790/** Opcode 0x23. */
7791FNIEMOP_DEF(iemOp_and_Gv_Ev)
7792{
7793 IEMOP_MNEMONIC(and_Gv_Ev, "and Gv,Ev");
7794 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7795 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
7796}
7797
7798
7799/** Opcode 0x24. */
7800FNIEMOP_DEF(iemOp_and_Al_Ib)
7801{
7802 IEMOP_MNEMONIC(and_al_Ib, "and al,Ib");
7803 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7804 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
7805}
7806
7807
7808/** Opcode 0x25. */
7809FNIEMOP_DEF(iemOp_and_eAX_Iz)
7810{
7811 IEMOP_MNEMONIC(and_rAX_Iz, "and rAX,Iz");
7812 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7813 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
7814}
7815
7816
7817/** Opcode 0x26. */
7818FNIEMOP_DEF(iemOp_seg_ES)
7819{
7820 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
7821 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
7822 pVCpu->iem.s.iEffSeg = X86_SREG_ES;
7823
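    /* A segment prefix only records its effect; decoding then continues with
       the next opcode byte via the one-byte dispatch table. */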
7824 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7825 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7826}
7827
7828
7829/** Opcode 0x27. */
7830FNIEMOP_DEF(iemOp_daa)
7831{
7832 IEMOP_MNEMONIC(daa_AL, "daa AL");
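    /* daa adjusts AL after a packed BCD addition: +6 if AF is set or the low
       nibble exceeds 9, +0x60 if CF is set or AL exceeded 0x99.  OF ends up
       undefined, which the verification hint below reflects. */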
7833 IEMOP_HLP_NO_64BIT();
7834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7835 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
7836 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
7837}
7838
7839
7840/** Opcode 0x28. */
7841FNIEMOP_DEF(iemOp_sub_Eb_Gb)
7842{
7843 IEMOP_MNEMONIC(sub_Eb_Gb, "sub Eb,Gb");
7844 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
7845}
7846
7847
7848/** Opcode 0x29. */
7849FNIEMOP_DEF(iemOp_sub_Ev_Gv)
7850{
7851 IEMOP_MNEMONIC(sub_Ev_Gv, "sub Ev,Gv");
7852 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
7853}
7854
7855
7856/** Opcode 0x2a. */
7857FNIEMOP_DEF(iemOp_sub_Gb_Eb)
7858{
7859 IEMOP_MNEMONIC(sub_Gb_Eb, "sub Gb,Eb");
7860 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
7861}
7862
7863
7864/** Opcode 0x2b. */
7865FNIEMOP_DEF(iemOp_sub_Gv_Ev)
7866{
7867 IEMOP_MNEMONIC(sub_Gv_Ev, "sub Gv,Ev");
7868 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
7869}
7870
7871
7872/** Opcode 0x2c. */
7873FNIEMOP_DEF(iemOp_sub_Al_Ib)
7874{
7875 IEMOP_MNEMONIC(sub_al_Ib, "sub al,Ib");
7876 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
7877}
7878
7879
7880/** Opcode 0x2d. */
7881FNIEMOP_DEF(iemOp_sub_eAX_Iz)
7882{
7883 IEMOP_MNEMONIC(sub_rAX_Iz, "sub rAX,Iz");
7884 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
7885}
7886
7887
7888/** Opcode 0x2e. */
7889FNIEMOP_DEF(iemOp_seg_CS)
7890{
7891 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
7892 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
7893 pVCpu->iem.s.iEffSeg = X86_SREG_CS;
7894
7895 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7896 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7897}
7898
7899
7900/** Opcode 0x2f. */
7901FNIEMOP_DEF(iemOp_das)
7902{
7903 IEMOP_MNEMONIC(das_AL, "das AL");
7904 IEMOP_HLP_NO_64BIT();
7905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7906 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
7907 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
7908}
7909
7910
7911/** Opcode 0x30. */
7912FNIEMOP_DEF(iemOp_xor_Eb_Gb)
7913{
7914 IEMOP_MNEMONIC(xor_Eb_Gb, "xor Eb,Gb");
7915 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7916 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
7917}
7918
7919
7920/** Opcode 0x31. */
7921FNIEMOP_DEF(iemOp_xor_Ev_Gv)
7922{
7923 IEMOP_MNEMONIC(xor_Ev_Gv, "xor Ev,Gv");
7924 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7925 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
7926}
7927
7928
7929/** Opcode 0x32. */
7930FNIEMOP_DEF(iemOp_xor_Gb_Eb)
7931{
7932 IEMOP_MNEMONIC(xor_Gb_Eb, "xor Gb,Eb");
7933 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7934 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
7935}
7936
7937
7938/** Opcode 0x33. */
7939FNIEMOP_DEF(iemOp_xor_Gv_Ev)
7940{
7941 IEMOP_MNEMONIC(xor_Gv_Ev, "xor Gv,Ev");
7942 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7943 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
7944}
7945
7946
7947/** Opcode 0x34. */
7948FNIEMOP_DEF(iemOp_xor_Al_Ib)
7949{
7950 IEMOP_MNEMONIC(xor_al_Ib, "xor al,Ib");
7951 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7952 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
7953}
7954
7955
7956/** Opcode 0x35. */
7957FNIEMOP_DEF(iemOp_xor_eAX_Iz)
7958{
7959 IEMOP_MNEMONIC(xor_rAX_Iz, "xor rAX,Iz");
7960 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7961 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
7962}
7963
7964
7965/** Opcode 0x36. */
7966FNIEMOP_DEF(iemOp_seg_SS)
7967{
7968 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
7969 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
7970 pVCpu->iem.s.iEffSeg = X86_SREG_SS;
7971
7972 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7973 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7974}
7975
7976
7977/** Opcode 0x37. */
7978FNIEMOP_STUB(iemOp_aaa);
7979
7980
7981/** Opcode 0x38. */
7982FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
7983{
7984 IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
7985 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
7986}
7987
7988
7989/** Opcode 0x39. */
7990FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
7991{
7992 IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
7993 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
7994}
7995
7996
7997/** Opcode 0x3a. */
7998FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
7999{
8000 IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
8001 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
8002}
8003
8004
8005/** Opcode 0x3b. */
8006FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
8007{
8008 IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
8009 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
8010}
8011
8012
8013/** Opcode 0x3c. */
8014FNIEMOP_DEF(iemOp_cmp_Al_Ib)
8015{
8016 IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
8017 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
8018}
8019
8020
8021/** Opcode 0x3d. */
8022FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
8023{
8024 IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
8025 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
8026}
8027
8028
8029/** Opcode 0x3e. */
8030FNIEMOP_DEF(iemOp_seg_DS)
8031{
8032 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
8033 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
8034 pVCpu->iem.s.iEffSeg = X86_SREG_DS;
8035
8036 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8037 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8038}
8039
8040
8041/** Opcode 0x3f. */
8042FNIEMOP_STUB(iemOp_aas);
8043
8044/**
8045 * Common 'inc/dec/not/neg register' helper.
8046 */
8047FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
8048{
8049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8050 switch (pVCpu->iem.s.enmEffOpSize)
8051 {
8052 case IEMMODE_16BIT:
8053 IEM_MC_BEGIN(2, 0);
8054 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8055 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8056 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
8057 IEM_MC_REF_EFLAGS(pEFlags);
8058 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
8059 IEM_MC_ADVANCE_RIP();
8060 IEM_MC_END();
8061 return VINF_SUCCESS;
8062
8063 case IEMMODE_32BIT:
8064 IEM_MC_BEGIN(2, 0);
8065 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8066 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8067 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8068 IEM_MC_REF_EFLAGS(pEFlags);
8069 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
8070 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8071 IEM_MC_ADVANCE_RIP();
8072 IEM_MC_END();
8073 return VINF_SUCCESS;
8074
8075 case IEMMODE_64BIT:
8076 IEM_MC_BEGIN(2, 0);
8077 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8078 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8079 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8080 IEM_MC_REF_EFLAGS(pEFlags);
8081 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
8082 IEM_MC_ADVANCE_RIP();
8083 IEM_MC_END();
8084 return VINF_SUCCESS;
8085 }
8086 return VINF_SUCCESS;
8087}
8088
8089
8090/** Opcode 0x40. */
8091FNIEMOP_DEF(iemOp_inc_eAX)
8092{
8093 /*
8094 * This is a REX prefix in 64-bit mode.
8095 */
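    /* Opcodes 0x40 thru 0x4f are the sixteen REX prefixes in 64-bit mode; the
       low nibble of the opcode maps directly onto the W, R, X and B bits, so
       0x40 is a plain REX with all four bits clear. */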
8096 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8097 {
8098 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
8099 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;
8100
8101 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8102 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8103 }
8104
8105 IEMOP_MNEMONIC(inc_eAX, "inc eAX");
8106 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
8107}
8108
8109
8110/** Opcode 0x41. */
8111FNIEMOP_DEF(iemOp_inc_eCX)
8112{
8113 /*
8114 * This is a REX prefix in 64-bit mode.
8115 */
8116 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8117 {
8118 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
8119 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
8120 pVCpu->iem.s.uRexB = 1 << 3;
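        /* REX.B supplies bit 3 of the ModRM rm (or opcode) register index and
           is kept pre-shifted for OR-ing into the 3-bit register number. */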
8121
8122 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8123 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8124 }
8125
8126 IEMOP_MNEMONIC(inc_eCX, "inc eCX");
8127 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
8128}
8129
8130
8131/** Opcode 0x42. */
8132FNIEMOP_DEF(iemOp_inc_eDX)
8133{
8134 /*
8135 * This is a REX prefix in 64-bit mode.
8136 */
8137 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8138 {
8139 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
8140 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
8141 pVCpu->iem.s.uRexIndex = 1 << 3;
8142
8143 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8144 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8145 }
8146
8147 IEMOP_MNEMONIC(inc_eDX, "inc eDX");
8148 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
8149}
8150
8151
8152
8153/** Opcode 0x43. */
8154FNIEMOP_DEF(iemOp_inc_eBX)
8155{
8156 /*
8157 * This is a REX prefix in 64-bit mode.
8158 */
8159 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8160 {
8161 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
8162 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
8163 pVCpu->iem.s.uRexB = 1 << 3;
8164 pVCpu->iem.s.uRexIndex = 1 << 3;
8165
8166 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8167 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8168 }
8169
8170 IEMOP_MNEMONIC(inc_eBX, "inc eBX");
8171 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
8172}
8173
8174
8175/** Opcode 0x44. */
8176FNIEMOP_DEF(iemOp_inc_eSP)
8177{
8178 /*
8179 * This is a REX prefix in 64-bit mode.
8180 */
8181 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8182 {
8183 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
8184 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
8185 pVCpu->iem.s.uRexReg = 1 << 3;
8186
8187 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8188 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8189 }
8190
8191 IEMOP_MNEMONIC(inc_eSP, "inc eSP");
8192 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
8193}
8194
8195
8196/** Opcode 0x45. */
8197FNIEMOP_DEF(iemOp_inc_eBP)
8198{
8199 /*
8200 * This is a REX prefix in 64-bit mode.
8201 */
8202 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8203 {
8204 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
8205 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
8206 pVCpu->iem.s.uRexReg = 1 << 3;
8207 pVCpu->iem.s.uRexB = 1 << 3;
8208
8209 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8210 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8211 }
8212
8213 IEMOP_MNEMONIC(inc_eBP, "inc eBP");
8214 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
8215}
8216
8217
8218/** Opcode 0x46. */
8219FNIEMOP_DEF(iemOp_inc_eSI)
8220{
8221 /*
8222 * This is a REX prefix in 64-bit mode.
8223 */
8224 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8225 {
8226 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
8227 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
8228 pVCpu->iem.s.uRexReg = 1 << 3;
8229 pVCpu->iem.s.uRexIndex = 1 << 3;
8230
8231 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8232 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8233 }
8234
8235 IEMOP_MNEMONIC(inc_eSI, "inc eSI");
8236 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
8237}
8238
8239
8240/** Opcode 0x47. */
8241FNIEMOP_DEF(iemOp_inc_eDI)
8242{
8243 /*
8244 * This is a REX prefix in 64-bit mode.
8245 */
8246 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8247 {
8248 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
8249 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
8250 pVCpu->iem.s.uRexReg = 1 << 3;
8251 pVCpu->iem.s.uRexB = 1 << 3;
8252 pVCpu->iem.s.uRexIndex = 1 << 3;
8253
8254 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8255 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8256 }
8257
8258 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
8259 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
8260}
8261
8262
8263/** Opcode 0x48. */
8264FNIEMOP_DEF(iemOp_dec_eAX)
8265{
8266 /*
8267 * This is a REX prefix in 64-bit mode.
8268 */
8269 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8270 {
8271 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
8272 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
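        /* REX.W promotes the operand size to 64 bits, so the effective
           operand size must be recalculated. */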
8273 iemRecalEffOpSize(pVCpu);
8274
8275 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8276 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8277 }
8278
8279 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
8280 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
8281}
8282
8283
8284/** Opcode 0x49. */
8285FNIEMOP_DEF(iemOp_dec_eCX)
8286{
8287 /*
8288 * This is a REX prefix in 64-bit mode.
8289 */
8290 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8291 {
8292 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
8293 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
8294 pVCpu->iem.s.uRexB = 1 << 3;
8295 iemRecalEffOpSize(pVCpu);
8296
8297 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8298 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8299 }
8300
8301 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
8302 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
8303}
8304
8305
8306/** Opcode 0x4a. */
8307FNIEMOP_DEF(iemOp_dec_eDX)
8308{
8309 /*
8310 * This is a REX prefix in 64-bit mode.
8311 */
8312 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8313 {
8314 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
8315 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8316 pVCpu->iem.s.uRexIndex = 1 << 3;
8317 iemRecalEffOpSize(pVCpu);
8318
8319 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8320 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8321 }
8322
8323 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
8324 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
8325}
8326
8327
8328/** Opcode 0x4b. */
8329FNIEMOP_DEF(iemOp_dec_eBX)
8330{
8331 /*
8332 * This is a REX prefix in 64-bit mode.
8333 */
8334 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8335 {
8336 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
8337 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8338 pVCpu->iem.s.uRexB = 1 << 3;
8339 pVCpu->iem.s.uRexIndex = 1 << 3;
8340 iemRecalEffOpSize(pVCpu);
8341
8342 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8343 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8344 }
8345
8346 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
8347 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
8348}
8349
8350
8351/** Opcode 0x4c. */
8352FNIEMOP_DEF(iemOp_dec_eSP)
8353{
8354 /*
8355 * This is a REX prefix in 64-bit mode.
8356 */
8357 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8358 {
8359 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
8360 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
8361 pVCpu->iem.s.uRexReg = 1 << 3;
8362 iemRecalEffOpSize(pVCpu);
8363
8364 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8365 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8366 }
8367
8368 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
8369 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
8370}
8371
8372
8373/** Opcode 0x4d. */
8374FNIEMOP_DEF(iemOp_dec_eBP)
8375{
8376 /*
8377 * This is a REX prefix in 64-bit mode.
8378 */
8379 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8380 {
8381 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
8382 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
8383 pVCpu->iem.s.uRexReg = 1 << 3;
8384 pVCpu->iem.s.uRexB = 1 << 3;
8385 iemRecalEffOpSize(pVCpu);
8386
8387 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8388 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8389 }
8390
8391 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
8392 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
8393}
8394
8395
8396/** Opcode 0x4e. */
8397FNIEMOP_DEF(iemOp_dec_eSI)
8398{
8399 /*
8400 * This is a REX prefix in 64-bit mode.
8401 */
8402 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8403 {
8404 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
8405 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8406 pVCpu->iem.s.uRexReg = 1 << 3;
8407 pVCpu->iem.s.uRexIndex = 1 << 3;
8408 iemRecalEffOpSize(pVCpu);
8409
8410 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8411 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8412 }
8413
8414 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
8415 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
8416}
8417
8418
8419/** Opcode 0x4f. */
8420FNIEMOP_DEF(iemOp_dec_eDI)
8421{
8422 /*
8423 * This is a REX prefix in 64-bit mode.
8424 */
8425 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8426 {
8427 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
8428 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8429 pVCpu->iem.s.uRexReg = 1 << 3;
8430 pVCpu->iem.s.uRexB = 1 << 3;
8431 pVCpu->iem.s.uRexIndex = 1 << 3;
8432 iemRecalEffOpSize(pVCpu);
8433
8434 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8435 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8436 }
8437
8438 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
8439 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
8440}
8441
8442
8443/**
8444 * Common 'push register' helper.
8445 */
8446FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
8447{
8448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8449 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8450 {
8451 iReg |= pVCpu->iem.s.uRexB;
8452 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
8453 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8454 }
8455
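    /* Note that in 64-bit mode a push cannot encode a 32-bit operand: the
       default is 64 bits and the 0x66 prefix selects 16 bits, as set up above. */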
8456 switch (pVCpu->iem.s.enmEffOpSize)
8457 {
8458 case IEMMODE_16BIT:
8459 IEM_MC_BEGIN(0, 1);
8460 IEM_MC_LOCAL(uint16_t, u16Value);
8461 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
8462 IEM_MC_PUSH_U16(u16Value);
8463 IEM_MC_ADVANCE_RIP();
8464 IEM_MC_END();
8465 break;
8466
8467 case IEMMODE_32BIT:
8468 IEM_MC_BEGIN(0, 1);
8469 IEM_MC_LOCAL(uint32_t, u32Value);
8470 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
8471 IEM_MC_PUSH_U32(u32Value);
8472 IEM_MC_ADVANCE_RIP();
8473 IEM_MC_END();
8474 break;
8475
8476 case IEMMODE_64BIT:
8477 IEM_MC_BEGIN(0, 1);
8478 IEM_MC_LOCAL(uint64_t, u64Value);
8479 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
8480 IEM_MC_PUSH_U64(u64Value);
8481 IEM_MC_ADVANCE_RIP();
8482 IEM_MC_END();
8483 break;
8484 }
8485
8486 return VINF_SUCCESS;
8487}
8488
8489
8490/** Opcode 0x50. */
8491FNIEMOP_DEF(iemOp_push_eAX)
8492{
8493 IEMOP_MNEMONIC(push_rAX, "push rAX");
8494 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
8495}
8496
8497
8498/** Opcode 0x51. */
8499FNIEMOP_DEF(iemOp_push_eCX)
8500{
8501 IEMOP_MNEMONIC(push_rCX, "push rCX");
8502 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
8503}
8504
8505
8506/** Opcode 0x52. */
8507FNIEMOP_DEF(iemOp_push_eDX)
8508{
8509 IEMOP_MNEMONIC(push_rDX, "push rDX");
8510 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
8511}
8512
8513
8514/** Opcode 0x53. */
8515FNIEMOP_DEF(iemOp_push_eBX)
8516{
8517 IEMOP_MNEMONIC(push_rBX, "push rBX");
8518 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
8519}
8520
8521
8522/** Opcode 0x54. */
8523FNIEMOP_DEF(iemOp_push_eSP)
8524{
8525 IEMOP_MNEMONIC(push_rSP, "push rSP");
8526 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
8527 {
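        /* The 8086/8088 pushes the decremented SP value (SP-2) rather than
           the original one; later CPUs push the original value. */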
8528 IEM_MC_BEGIN(0, 1);
8529 IEM_MC_LOCAL(uint16_t, u16Value);
8530 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
8531 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
8532 IEM_MC_PUSH_U16(u16Value);
8533 IEM_MC_ADVANCE_RIP();
8534 IEM_MC_END();
8535 }
8536 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
8537}
8538
8539
8540/** Opcode 0x55. */
8541FNIEMOP_DEF(iemOp_push_eBP)
8542{
8543 IEMOP_MNEMONIC(push_rBP, "push rBP");
8544 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
8545}
8546
8547
8548/** Opcode 0x56. */
8549FNIEMOP_DEF(iemOp_push_eSI)
8550{
8551 IEMOP_MNEMONIC(push_rSI, "push rSI");
8552 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
8553}
8554
8555
8556/** Opcode 0x57. */
8557FNIEMOP_DEF(iemOp_push_eDI)
8558{
8559 IEMOP_MNEMONIC(push_rDI, "push rDI");
8560 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
8561}
8562
8563
8564/**
8565 * Common 'pop register' helper.
8566 */
8567FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
8568{
8569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8570 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8571 {
8572 iReg |= pVCpu->iem.s.uRexB;
8573 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
8574 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8575 }
8576
8577 switch (pVCpu->iem.s.enmEffOpSize)
8578 {
8579 case IEMMODE_16BIT:
8580 IEM_MC_BEGIN(0, 1);
8581 IEM_MC_LOCAL(uint16_t *, pu16Dst);
8582 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
8583 IEM_MC_POP_U16(pu16Dst);
8584 IEM_MC_ADVANCE_RIP();
8585 IEM_MC_END();
8586 break;
8587
8588 case IEMMODE_32BIT:
8589 IEM_MC_BEGIN(0, 1);
8590 IEM_MC_LOCAL(uint32_t *, pu32Dst);
8591 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8592 IEM_MC_POP_U32(pu32Dst);
8593 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
8594 IEM_MC_ADVANCE_RIP();
8595 IEM_MC_END();
8596 break;
8597
8598 case IEMMODE_64BIT:
8599 IEM_MC_BEGIN(0, 1);
8600 IEM_MC_LOCAL(uint64_t *, pu64Dst);
8601 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8602 IEM_MC_POP_U64(pu64Dst);
8603 IEM_MC_ADVANCE_RIP();
8604 IEM_MC_END();
8605 break;
8606 }
8607
8608 return VINF_SUCCESS;
8609}
8610
8611
8612/** Opcode 0x58. */
8613FNIEMOP_DEF(iemOp_pop_eAX)
8614{
8615 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
8616 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
8617}
8618
8619
8620/** Opcode 0x59. */
8621FNIEMOP_DEF(iemOp_pop_eCX)
8622{
8623 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
8624 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
8625}
8626
8627
8628/** Opcode 0x5a. */
8629FNIEMOP_DEF(iemOp_pop_eDX)
8630{
8631 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
8632 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
8633}
8634
8635
8636/** Opcode 0x5b. */
8637FNIEMOP_DEF(iemOp_pop_eBX)
8638{
8639 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
8640 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
8641}
8642
8643
8644/** Opcode 0x5c. */
8645FNIEMOP_DEF(iemOp_pop_eSP)
8646{
8647 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
8648 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8649 {
8650 if (pVCpu->iem.s.uRexB)
8651 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
8652 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
8653 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8654 }
8655
8656 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
8657 DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
8658 /** @todo add testcase for this instruction. */
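    /* pop rSP is special in that the value read from the stack overwrites the
       already incremented stack pointer, discarding the increment. */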
8659 switch (pVCpu->iem.s.enmEffOpSize)
8660 {
8661 case IEMMODE_16BIT:
8662 IEM_MC_BEGIN(0, 1);
8663 IEM_MC_LOCAL(uint16_t, u16Dst);
8664 IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
8665 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
8666 IEM_MC_ADVANCE_RIP();
8667 IEM_MC_END();
8668 break;
8669
8670 case IEMMODE_32BIT:
8671 IEM_MC_BEGIN(0, 1);
8672 IEM_MC_LOCAL(uint32_t, u32Dst);
8673 IEM_MC_POP_U32(&u32Dst);
8674 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
8675 IEM_MC_ADVANCE_RIP();
8676 IEM_MC_END();
8677 break;
8678
8679 case IEMMODE_64BIT:
8680 IEM_MC_BEGIN(0, 1);
8681 IEM_MC_LOCAL(uint64_t, u64Dst);
8682 IEM_MC_POP_U64(&u64Dst);
8683 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
8684 IEM_MC_ADVANCE_RIP();
8685 IEM_MC_END();
8686 break;
8687 }
8688
8689 return VINF_SUCCESS;
8690}
8691
8692
8693/** Opcode 0x5d. */
8694FNIEMOP_DEF(iemOp_pop_eBP)
8695{
8696 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
8697 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
8698}
8699
8700
8701/** Opcode 0x5e. */
8702FNIEMOP_DEF(iemOp_pop_eSI)
8703{
8704 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
8705 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
8706}
8707
8708
8709/** Opcode 0x5f. */
8710FNIEMOP_DEF(iemOp_pop_eDI)
8711{
8712 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
8713 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
8714}
8715
8716
8717/** Opcode 0x60. */
8718FNIEMOP_DEF(iemOp_pusha)
8719{
8720 IEMOP_MNEMONIC(pusha, "pusha");
8721 IEMOP_HLP_MIN_186();
8722 IEMOP_HLP_NO_64BIT();
8723 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8724 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
8725 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
8726 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
8727}
8728
8729
8730/** Opcode 0x61. */
8731FNIEMOP_DEF(iemOp_popa)
8732{
8733 IEMOP_MNEMONIC(popa, "popa");
8734 IEMOP_HLP_MIN_186();
8735 IEMOP_HLP_NO_64BIT();
8736 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8737 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
8738 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
8739 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
8740}
8741
8742
8743/** Opcode 0x62. */
8744FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
8745// IEMOP_HLP_MIN_186();
8746
8747
8748/** Opcode 0x63 - non-64-bit modes. */
8749FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
8750{
8751 IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
8752 IEMOP_HLP_MIN_286();
8753 IEMOP_HLP_NO_REAL_OR_V86_MODE();
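    /* arpl raises the RPL of the destination selector to at least that of the
       source, setting ZF when an adjustment was made and clearing it otherwise. */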
8754 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8755
8756 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8757 {
8758 /* Register */
8759 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
8760 IEM_MC_BEGIN(3, 0);
8761 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8762 IEM_MC_ARG(uint16_t, u16Src, 1);
8763 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8764
8765 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8766 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
8767 IEM_MC_REF_EFLAGS(pEFlags);
8768 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
8769
8770 IEM_MC_ADVANCE_RIP();
8771 IEM_MC_END();
8772 }
8773 else
8774 {
8775 /* Memory */
8776 IEM_MC_BEGIN(3, 2);
8777 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8778 IEM_MC_ARG(uint16_t, u16Src, 1);
8779 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8780 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8781
8782 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8783 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
8784 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8785 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8786 IEM_MC_FETCH_EFLAGS(EFlags);
8787 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
8788
8789 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8790 IEM_MC_COMMIT_EFLAGS(EFlags);
8791 IEM_MC_ADVANCE_RIP();
8792 IEM_MC_END();
8793 }
8794 return VINF_SUCCESS;
8795
8796}
8797
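/* Illustrative note (not from the original source): ARPL raises the
 * destination selector's RPL field (bits 1:0) to at least the source's
 * and sets ZF when it had to adjust, e.g.:
 *     mov ax, 0x0B    ; selector with RPL=3
 *     mov bx, 0x08    ; selector with RPL=0
 *     arpl bx, ax     ; BX becomes 0x0B, ZF=1
 * If dst.RPL >= src.RPL already, ZF is cleared and dst is unchanged. */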
8798
8799/** Opcode 0x63.
8800 * @note This is a weird one. It works like a regular move instruction if
8801 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
8802 * @todo This definitely needs a testcase to verify the odd cases. */
8803FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
8804{
8805 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */
8806
8807 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
8808 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8809
8810 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8811 {
8812 /*
8813 * Register to register.
8814 */
8815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8816 IEM_MC_BEGIN(0, 1);
8817 IEM_MC_LOCAL(uint64_t, u64Value);
8818 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8819 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8820 IEM_MC_ADVANCE_RIP();
8821 IEM_MC_END();
8822 }
8823 else
8824 {
8825 /*
8826 * We're loading a register from memory.
8827 */
8828 IEM_MC_BEGIN(0, 2);
8829 IEM_MC_LOCAL(uint64_t, u64Value);
8830 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8831 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8833 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8834 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8835 IEM_MC_ADVANCE_RIP();
8836 IEM_MC_END();
8837 }
8838 return VINF_SUCCESS;
8839}
8840
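/* Illustrative sketch of the 64-bit behaviour handled above (assumption
 * based on the AMD note): with REX.W, opcode 0x63 sign-extends a 32-bit
 * source into a 64-bit destination:
 *     movsxd rax, ecx     ; RAX = (int64_t)(int32_t)ECX
 * Without REX.W it acts like a plain 32-bit (or, with 0x66, 16-bit) move,
 * which is why the caller only dispatches here for IEMMODE_64BIT. */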
8841
8842/** Opcode 0x64. */
8843FNIEMOP_DEF(iemOp_seg_FS)
8844{
8845 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
8846 IEMOP_HLP_MIN_386();
8847
8848 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
8849 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
8850
8851 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8852 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8853}
8854
8855
8856/** Opcode 0x65. */
8857FNIEMOP_DEF(iemOp_seg_GS)
8858{
8859 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
8860 IEMOP_HLP_MIN_386();
8861
8862 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
8863 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
8864
8865 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8866 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8867}
8868
8869
8870/** Opcode 0x66. */
8871FNIEMOP_DEF(iemOp_op_size)
8872{
8873 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
8874 IEMOP_HLP_MIN_386();
8875
8876 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
8877 iemRecalEffOpSize(pVCpu);
8878
8879 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8880 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8881}
8882
8883
8884/** Opcode 0x67. */
8885FNIEMOP_DEF(iemOp_addr_size)
8886{
8887 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
8888 IEMOP_HLP_MIN_386();
8889
8890 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
8891 switch (pVCpu->iem.s.enmDefAddrMode)
8892 {
8893 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
8894 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
8895 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
8896 default: AssertFailed();
8897 }
8898
8899 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8900 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8901}
8902
8903
8904/** Opcode 0x68. */
8905FNIEMOP_DEF(iemOp_push_Iz)
8906{
8907 IEMOP_MNEMONIC(push_Iz, "push Iz");
8908 IEMOP_HLP_MIN_186();
8909 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8910 switch (pVCpu->iem.s.enmEffOpSize)
8911 {
8912 case IEMMODE_16BIT:
8913 {
8914 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8915 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8916 IEM_MC_BEGIN(0, 0);
8917 IEM_MC_PUSH_U16(u16Imm);
8918 IEM_MC_ADVANCE_RIP();
8919 IEM_MC_END();
8920 return VINF_SUCCESS;
8921 }
8922
8923 case IEMMODE_32BIT:
8924 {
8925 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8927 IEM_MC_BEGIN(0, 0);
8928 IEM_MC_PUSH_U32(u32Imm);
8929 IEM_MC_ADVANCE_RIP();
8930 IEM_MC_END();
8931 return VINF_SUCCESS;
8932 }
8933
8934 case IEMMODE_64BIT:
8935 {
8936 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8938 IEM_MC_BEGIN(0, 0);
8939 IEM_MC_PUSH_U64(u64Imm);
8940 IEM_MC_ADVANCE_RIP();
8941 IEM_MC_END();
8942 return VINF_SUCCESS;
8943 }
8944
8945 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8946 }
8947}
8948
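/* Illustrative note (not from the original source): in 64-bit mode there
 * is no 64-bit immediate form of PUSH; the Iz operand stays 32 bits and
 * is sign-extended, hence IEM_OPCODE_GET_NEXT_S32_SX_U64 above. The
 * encoding 68 FF FF FF FF thus pushes 0xFFFFFFFFFFFFFFFF. */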
8949
8950/** Opcode 0x69. */
8951FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
8952{
8953 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
8954 IEMOP_HLP_MIN_186();
8955 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8956 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
8957
8958 switch (pVCpu->iem.s.enmEffOpSize)
8959 {
8960 case IEMMODE_16BIT:
8961 {
8962 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8963 {
8964 /* register operand */
8965 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8967
8968 IEM_MC_BEGIN(3, 1);
8969 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8970 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
8971 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8972 IEM_MC_LOCAL(uint16_t, u16Tmp);
8973
8974 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8975 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
8976 IEM_MC_REF_EFLAGS(pEFlags);
8977 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
8978 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
8979
8980 IEM_MC_ADVANCE_RIP();
8981 IEM_MC_END();
8982 }
8983 else
8984 {
8985 /* memory operand */
8986 IEM_MC_BEGIN(3, 2);
8987 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8988 IEM_MC_ARG(uint16_t, u16Src, 1);
8989 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8990 IEM_MC_LOCAL(uint16_t, u16Tmp);
8991 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8992
8993 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
8994 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8995 IEM_MC_ASSIGN(u16Src, u16Imm);
8996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8997 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8998 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
8999 IEM_MC_REF_EFLAGS(pEFlags);
9000 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9001 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9002
9003 IEM_MC_ADVANCE_RIP();
9004 IEM_MC_END();
9005 }
9006 return VINF_SUCCESS;
9007 }
9008
9009 case IEMMODE_32BIT:
9010 {
9011 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9012 {
9013 /* register operand */
9014 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9016
9017 IEM_MC_BEGIN(3, 1);
9018 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9019 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
9020 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9021 IEM_MC_LOCAL(uint32_t, u32Tmp);
9022
9023 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9024 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9025 IEM_MC_REF_EFLAGS(pEFlags);
9026 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9027 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9028
9029 IEM_MC_ADVANCE_RIP();
9030 IEM_MC_END();
9031 }
9032 else
9033 {
9034 /* memory operand */
9035 IEM_MC_BEGIN(3, 2);
9036 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9037 IEM_MC_ARG(uint32_t, u32Src, 1);
9038 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9039 IEM_MC_LOCAL(uint32_t, u32Tmp);
9040 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9041
9042 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9043 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9044 IEM_MC_ASSIGN(u32Src, u32Imm);
9045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9046 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9047 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9048 IEM_MC_REF_EFLAGS(pEFlags);
9049 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9050 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9051
9052 IEM_MC_ADVANCE_RIP();
9053 IEM_MC_END();
9054 }
9055 return VINF_SUCCESS;
9056 }
9057
9058 case IEMMODE_64BIT:
9059 {
9060 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9061 {
9062 /* register operand */
9063 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9065
9066 IEM_MC_BEGIN(3, 1);
9067 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9068 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
9069 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9070 IEM_MC_LOCAL(uint64_t, u64Tmp);
9071
9072 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9073 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9074 IEM_MC_REF_EFLAGS(pEFlags);
9075 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9076 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9077
9078 IEM_MC_ADVANCE_RIP();
9079 IEM_MC_END();
9080 }
9081 else
9082 {
9083 /* memory operand */
9084 IEM_MC_BEGIN(3, 2);
9085 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9086 IEM_MC_ARG(uint64_t, u64Src, 1);
9087 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9088 IEM_MC_LOCAL(uint64_t, u64Tmp);
9089 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9090
9091 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9092 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9093 IEM_MC_ASSIGN(u64Src, u64Imm);
9094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9095 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9096 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9097 IEM_MC_REF_EFLAGS(pEFlags);
9098 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9099 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9100
9101 IEM_MC_ADVANCE_RIP();
9102 IEM_MC_END();
9103 }
9104 return VINF_SUCCESS;
9105 }
9106 }
9107 AssertFailedReturn(VERR_IEM_IPE_9);
9108}
9109
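/* Illustrative semantics of the three-operand form above (not from the
 * original source): Gv = Ev * Iz truncated to the operand size, with CF
 * and OF set when the signed product did not fit, e.g.:
 *     imul ax, bx, 1000   ; AX = BX * 1000, CF=OF=1 on signed overflow
 * SF, ZF, AF and PF are left undefined, matching the
 * IEMOP_VERIFICATION_UNDEFINED_EFLAGS mask above. */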
9110
9111/** Opcode 0x6a. */
9112FNIEMOP_DEF(iemOp_push_Ib)
9113{
9114 IEMOP_MNEMONIC(push_Ib, "push Ib");
9115 IEMOP_HLP_MIN_186();
9116 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9118 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9119
9120 IEM_MC_BEGIN(0, 0);
9121 switch (pVCpu->iem.s.enmEffOpSize)
9122 {
9123 case IEMMODE_16BIT:
9124 IEM_MC_PUSH_U16(i8Imm);
9125 break;
9126 case IEMMODE_32BIT:
9127 IEM_MC_PUSH_U32(i8Imm);
9128 break;
9129 case IEMMODE_64BIT:
9130 IEM_MC_PUSH_U64(i8Imm);
9131 break;
9132 }
9133 IEM_MC_ADVANCE_RIP();
9134 IEM_MC_END();
9135 return VINF_SUCCESS;
9136}
9137
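/* Illustrative note (not from the original source): PUSH Ib sign-extends
 * the byte to the effective operand size, which is why the int8_t i8Imm
 * can be handed straight to IEM_MC_PUSH_U16/U32/U64 above; 6A FF pushes
 * 0xFFFF, 0xFFFFFFFF or 0xFFFFFFFFFFFFFFFF depending on the mode. */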
9138
9139/** Opcode 0x6b. */
9140FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
9141{
9142 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
9143 IEMOP_HLP_MIN_186();
9144 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9145 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
9146
9147 switch (pVCpu->iem.s.enmEffOpSize)
9148 {
9149 case IEMMODE_16BIT:
9150 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9151 {
9152 /* register operand */
9153 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9155
9156 IEM_MC_BEGIN(3, 1);
9157 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9158 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm, 1);
9159 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9160 IEM_MC_LOCAL(uint16_t, u16Tmp);
9161
9162 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9163 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9164 IEM_MC_REF_EFLAGS(pEFlags);
9165 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9166 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9167
9168 IEM_MC_ADVANCE_RIP();
9169 IEM_MC_END();
9170 }
9171 else
9172 {
9173 /* memory operand */
9174 IEM_MC_BEGIN(3, 2);
9175 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9176 IEM_MC_ARG(uint16_t, u16Src, 1);
9177 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9178 IEM_MC_LOCAL(uint16_t, u16Tmp);
9179 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9180
9181 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9182 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
9183 IEM_MC_ASSIGN(u16Src, u16Imm);
9184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9185 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9186 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9187 IEM_MC_REF_EFLAGS(pEFlags);
9188 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9189 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9190
9191 IEM_MC_ADVANCE_RIP();
9192 IEM_MC_END();
9193 }
9194 return VINF_SUCCESS;
9195
9196 case IEMMODE_32BIT:
9197 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9198 {
9199 /* register operand */
9200 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9202
9203 IEM_MC_BEGIN(3, 1);
9204 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9205 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm, 1);
9206 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9207 IEM_MC_LOCAL(uint32_t, u32Tmp);
9208
9209 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9210 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9211 IEM_MC_REF_EFLAGS(pEFlags);
9212 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9213 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9214
9215 IEM_MC_ADVANCE_RIP();
9216 IEM_MC_END();
9217 }
9218 else
9219 {
9220 /* memory operand */
9221 IEM_MC_BEGIN(3, 2);
9222 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9223 IEM_MC_ARG(uint32_t, u32Src, 1);
9224 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9225 IEM_MC_LOCAL(uint32_t, u32Tmp);
9226 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9227
9228 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9229 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
9230 IEM_MC_ASSIGN(u32Src, u32Imm);
9231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9232 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9233 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9234 IEM_MC_REF_EFLAGS(pEFlags);
9235 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9236 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9237
9238 IEM_MC_ADVANCE_RIP();
9239 IEM_MC_END();
9240 }
9241 return VINF_SUCCESS;
9242
9243 case IEMMODE_64BIT:
9244 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9245 {
9246 /* register operand */
9247 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9248 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9249
9250 IEM_MC_BEGIN(3, 1);
9251 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9252 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm, 1);
9253 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9254 IEM_MC_LOCAL(uint64_t, u64Tmp);
9255
9256 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9257 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9258 IEM_MC_REF_EFLAGS(pEFlags);
9259 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9260 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9261
9262 IEM_MC_ADVANCE_RIP();
9263 IEM_MC_END();
9264 }
9265 else
9266 {
9267 /* memory operand */
9268 IEM_MC_BEGIN(3, 2);
9269 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9270 IEM_MC_ARG(uint64_t, u64Src, 1);
9271 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9272 IEM_MC_LOCAL(uint64_t, u64Tmp);
9273 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9274
9275 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9276 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
9277 IEM_MC_ASSIGN(u64Src, u64Imm);
9278 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9279 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9280 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9281 IEM_MC_REF_EFLAGS(pEFlags);
9282 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9283 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9284
9285 IEM_MC_ADVANCE_RIP();
9286 IEM_MC_END();
9287 }
9288 return VINF_SUCCESS;
9289 }
9290 AssertFailedReturn(VERR_IEM_IPE_8);
9291}
9292
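/* Illustrative note (not from the original source): 0x6b differs from
 * 0x69 only in taking a sign-extended byte immediate, e.g.
 *     imul eax, ecx, -2   ; 6B C1 FE
 * instead of a full Iz, saving code bytes for small multipliers. */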
9293
9294/** Opcode 0x6c. */
9295FNIEMOP_DEF(iemOp_insb_Yb_DX)
9296{
9297 IEMOP_HLP_MIN_186();
9298 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9299 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
9300 {
9301 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
9302 switch (pVCpu->iem.s.enmEffAddrMode)
9303 {
9304 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
9305 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
9306 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
9307 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9308 }
9309 }
9310 else
9311 {
9312 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
9313 switch (pVCpu->iem.s.enmEffAddrMode)
9314 {
9315 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
9316 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
9317 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
9318 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9319 }
9320 }
9321}
9322
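/* Illustrative note (not from the original source): the check above treats
 * both F3 (REP) and F2 (REPNE) as a repeat prefix; INS/OUTS perform no
 * data comparison, so there is no distinct REPNE behaviour for them. */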
9323
9324/** Opcode 0x6d. */
9325FNIEMOP_DEF(iemOp_inswd_Yv_DX)
9326{
9327 IEMOP_HLP_MIN_186();
9328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9329 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
9330 {
9331 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
9332 switch (pVCpu->iem.s.enmEffOpSize)
9333 {
9334 case IEMMODE_16BIT:
9335 switch (pVCpu->iem.s.enmEffAddrMode)
9336 {
9337 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
9338 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
9339 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
9340 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9341 }
9342 break;
9343 case IEMMODE_64BIT:
9344 case IEMMODE_32BIT:
9345 switch (pVCpu->iem.s.enmEffAddrMode)
9346 {
9347 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
9348 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
9349 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
9350 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9351 }
9352 break;
9353 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9354 }
9355 }
9356 else
9357 {
9358 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
9359 switch (pVCpu->iem.s.enmEffOpSize)
9360 {
9361 case IEMMODE_16BIT:
9362 switch (pVCpu->iem.s.enmEffAddrMode)
9363 {
9364 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
9365 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
9366 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
9367 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9368 }
9369 break;
9370 case IEMMODE_64BIT:
9371 case IEMMODE_32BIT:
9372 switch (pVCpu->iem.s.enmEffAddrMode)
9373 {
9374 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
9375 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
9376 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
9377 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9378 }
9379 break;
9380 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9381 }
9382 }
9383}
9384
9385
9386/** Opcode 0x6e. */
9387FNIEMOP_DEF(iemOp_outsb_Yb_DX)
9388{
9389 IEMOP_HLP_MIN_186();
9390 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9391 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
9392 {
9393 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
9394 switch (pVCpu->iem.s.enmEffAddrMode)
9395 {
9396 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
9397 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
9398 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
9399 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9400 }
9401 }
9402 else
9403 {
9404 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
9405 switch (pVCpu->iem.s.enmEffAddrMode)
9406 {
9407 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
9408 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
9409 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
9410 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9411 }
9412 }
9413}
9414
9415
9416/** Opcode 0x6f. */
9417FNIEMOP_DEF(iemOp_outswd_Yv_DX)
9418{
9419 IEMOP_HLP_MIN_186();
9420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9421 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
9422 {
9423 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
9424 switch (pVCpu->iem.s.enmEffOpSize)
9425 {
9426 case IEMMODE_16BIT:
9427 switch (pVCpu->iem.s.enmEffAddrMode)
9428 {
9429 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
9430 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
9431 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
9432 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9433 }
9434 break;
9435 case IEMMODE_64BIT:
9436 case IEMMODE_32BIT:
9437 switch (pVCpu->iem.s.enmEffAddrMode)
9438 {
9439 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
9440 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
9441 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
9442 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9443 }
9444 break;
9445 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9446 }
9447 }
9448 else
9449 {
9450 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
9451 switch (pVCpu->iem.s.enmEffOpSize)
9452 {
9453 case IEMMODE_16BIT:
9454 switch (pVCpu->iem.s.enmEffAddrMode)
9455 {
9456 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
9457 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
9458 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
9459 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9460 }
9461 break;
9462 case IEMMODE_64BIT:
9463 case IEMMODE_32BIT:
9464 switch (pVCpu->iem.s.enmEffAddrMode)
9465 {
9466 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
9467 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
9468 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
9469 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9470 }
9471 break;
9472 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9473 }
9474 }
9475}
9476
9477
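/* Flag conditions for the Jcc Jb handlers below (0x70..0x7f), an
 * illustrative summary (not from the original source):
 *     jo/jno   OF set/clear            js/jns   SF set/clear
 *     jc/jnc   CF set/clear            jp/jnp   PF set/clear
 *     je/jne   ZF set/clear            jl/jge   SF != OF / SF == OF
 *     jbe/ja   (CF|ZF) set/clear       jle/jg   ZF or SF != OF, negated
 * Each handler either takes the branch via IEM_MC_REL_JMP_S8 or falls
 * through via IEM_MC_ADVANCE_RIP. */
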
9478/** Opcode 0x70. */
9479FNIEMOP_DEF(iemOp_jo_Jb)
9480{
9481 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
9482 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9484 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9485
9486 IEM_MC_BEGIN(0, 0);
9487 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
9488 IEM_MC_REL_JMP_S8(i8Imm);
9489 } IEM_MC_ELSE() {
9490 IEM_MC_ADVANCE_RIP();
9491 } IEM_MC_ENDIF();
9492 IEM_MC_END();
9493 return VINF_SUCCESS;
9494}
9495
9496
9497/** Opcode 0x71. */
9498FNIEMOP_DEF(iemOp_jno_Jb)
9499{
9500 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
9501 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9503 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9504
9505 IEM_MC_BEGIN(0, 0);
9506 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
9507 IEM_MC_ADVANCE_RIP();
9508 } IEM_MC_ELSE() {
9509 IEM_MC_REL_JMP_S8(i8Imm);
9510 } IEM_MC_ENDIF();
9511 IEM_MC_END();
9512 return VINF_SUCCESS;
9513}
9514
9515/** Opcode 0x72. */
9516FNIEMOP_DEF(iemOp_jc_Jb)
9517{
9518 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
9519 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9521 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9522
9523 IEM_MC_BEGIN(0, 0);
9524 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
9525 IEM_MC_REL_JMP_S8(i8Imm);
9526 } IEM_MC_ELSE() {
9527 IEM_MC_ADVANCE_RIP();
9528 } IEM_MC_ENDIF();
9529 IEM_MC_END();
9530 return VINF_SUCCESS;
9531}
9532
9533
9534/** Opcode 0x73. */
9535FNIEMOP_DEF(iemOp_jnc_Jb)
9536{
9537 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
9538 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9540 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9541
9542 IEM_MC_BEGIN(0, 0);
9543 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
9544 IEM_MC_ADVANCE_RIP();
9545 } IEM_MC_ELSE() {
9546 IEM_MC_REL_JMP_S8(i8Imm);
9547 } IEM_MC_ENDIF();
9548 IEM_MC_END();
9549 return VINF_SUCCESS;
9550}
9551
9552
9553/** Opcode 0x74. */
9554FNIEMOP_DEF(iemOp_je_Jb)
9555{
9556 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
9557 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9559 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9560
9561 IEM_MC_BEGIN(0, 0);
9562 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
9563 IEM_MC_REL_JMP_S8(i8Imm);
9564 } IEM_MC_ELSE() {
9565 IEM_MC_ADVANCE_RIP();
9566 } IEM_MC_ENDIF();
9567 IEM_MC_END();
9568 return VINF_SUCCESS;
9569}
9570
9571
9572/** Opcode 0x75. */
9573FNIEMOP_DEF(iemOp_jne_Jb)
9574{
9575 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
9576 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9578 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9579
9580 IEM_MC_BEGIN(0, 0);
9581 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
9582 IEM_MC_ADVANCE_RIP();
9583 } IEM_MC_ELSE() {
9584 IEM_MC_REL_JMP_S8(i8Imm);
9585 } IEM_MC_ENDIF();
9586 IEM_MC_END();
9587 return VINF_SUCCESS;
9588}
9589
9590
9591/** Opcode 0x76. */
9592FNIEMOP_DEF(iemOp_jbe_Jb)
9593{
9594 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
9595 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9597 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9598
9599 IEM_MC_BEGIN(0, 0);
9600 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
9601 IEM_MC_REL_JMP_S8(i8Imm);
9602 } IEM_MC_ELSE() {
9603 IEM_MC_ADVANCE_RIP();
9604 } IEM_MC_ENDIF();
9605 IEM_MC_END();
9606 return VINF_SUCCESS;
9607}
9608
9609
9610/** Opcode 0x77. */
9611FNIEMOP_DEF(iemOp_jnbe_Jb)
9612{
9613 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
9614 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9616 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9617
9618 IEM_MC_BEGIN(0, 0);
9619 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
9620 IEM_MC_ADVANCE_RIP();
9621 } IEM_MC_ELSE() {
9622 IEM_MC_REL_JMP_S8(i8Imm);
9623 } IEM_MC_ENDIF();
9624 IEM_MC_END();
9625 return VINF_SUCCESS;
9626}
9627
9628
9629/** Opcode 0x78. */
9630FNIEMOP_DEF(iemOp_js_Jb)
9631{
9632 IEMOP_MNEMONIC(js_Jb, "js Jb");
9633 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9635 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9636
9637 IEM_MC_BEGIN(0, 0);
9638 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
9639 IEM_MC_REL_JMP_S8(i8Imm);
9640 } IEM_MC_ELSE() {
9641 IEM_MC_ADVANCE_RIP();
9642 } IEM_MC_ENDIF();
9643 IEM_MC_END();
9644 return VINF_SUCCESS;
9645}
9646
9647
9648/** Opcode 0x79. */
9649FNIEMOP_DEF(iemOp_jns_Jb)
9650{
9651 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
9652 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9654 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9655
9656 IEM_MC_BEGIN(0, 0);
9657 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
9658 IEM_MC_ADVANCE_RIP();
9659 } IEM_MC_ELSE() {
9660 IEM_MC_REL_JMP_S8(i8Imm);
9661 } IEM_MC_ENDIF();
9662 IEM_MC_END();
9663 return VINF_SUCCESS;
9664}
9665
9666
9667/** Opcode 0x7a. */
9668FNIEMOP_DEF(iemOp_jp_Jb)
9669{
9670 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
9671 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9673 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9674
9675 IEM_MC_BEGIN(0, 0);
9676 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
9677 IEM_MC_REL_JMP_S8(i8Imm);
9678 } IEM_MC_ELSE() {
9679 IEM_MC_ADVANCE_RIP();
9680 } IEM_MC_ENDIF();
9681 IEM_MC_END();
9682 return VINF_SUCCESS;
9683}
9684
9685
9686/** Opcode 0x7b. */
9687FNIEMOP_DEF(iemOp_jnp_Jb)
9688{
9689 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
9690 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9692 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9693
9694 IEM_MC_BEGIN(0, 0);
9695 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
9696 IEM_MC_ADVANCE_RIP();
9697 } IEM_MC_ELSE() {
9698 IEM_MC_REL_JMP_S8(i8Imm);
9699 } IEM_MC_ENDIF();
9700 IEM_MC_END();
9701 return VINF_SUCCESS;
9702}
9703
9704
9705/** Opcode 0x7c. */
9706FNIEMOP_DEF(iemOp_jl_Jb)
9707{
9708 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
9709 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9711 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9712
9713 IEM_MC_BEGIN(0, 0);
9714 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
9715 IEM_MC_REL_JMP_S8(i8Imm);
9716 } IEM_MC_ELSE() {
9717 IEM_MC_ADVANCE_RIP();
9718 } IEM_MC_ENDIF();
9719 IEM_MC_END();
9720 return VINF_SUCCESS;
9721}
9722
9723
9724/** Opcode 0x7d. */
9725FNIEMOP_DEF(iemOp_jnl_Jb)
9726{
9727 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
9728 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9730 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9731
9732 IEM_MC_BEGIN(0, 0);
9733 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
9734 IEM_MC_ADVANCE_RIP();
9735 } IEM_MC_ELSE() {
9736 IEM_MC_REL_JMP_S8(i8Imm);
9737 } IEM_MC_ENDIF();
9738 IEM_MC_END();
9739 return VINF_SUCCESS;
9740}
9741
9742
9743/** Opcode 0x7e. */
9744FNIEMOP_DEF(iemOp_jle_Jb)
9745{
9746 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
9747 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9749 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9750
9751 IEM_MC_BEGIN(0, 0);
9752 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
9753 IEM_MC_REL_JMP_S8(i8Imm);
9754 } IEM_MC_ELSE() {
9755 IEM_MC_ADVANCE_RIP();
9756 } IEM_MC_ENDIF();
9757 IEM_MC_END();
9758 return VINF_SUCCESS;
9759}
9760
9761
9762/** Opcode 0x7f. */
9763FNIEMOP_DEF(iemOp_jnle_Jb)
9764{
9765 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
9766 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9768 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9769
9770 IEM_MC_BEGIN(0, 0);
9771 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
9772 IEM_MC_ADVANCE_RIP();
9773 } IEM_MC_ELSE() {
9774 IEM_MC_REL_JMP_S8(i8Imm);
9775 } IEM_MC_ENDIF();
9776 IEM_MC_END();
9777 return VINF_SUCCESS;
9778}
9779
9780
9781/** Opcode 0x80. */
9782FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
9783{
9784 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9785 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9786 {
9787 case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
9788 case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
9789 case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
9790 case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
9791 case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
9792 case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
9793 case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
9794 case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
9795 }
9796 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
9797
9798 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9799 {
9800 /* register target */
9801 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9803 IEM_MC_BEGIN(3, 0);
9804 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9805 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
9806 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9807
9808 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9809 IEM_MC_REF_EFLAGS(pEFlags);
9810 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
9811
9812 IEM_MC_ADVANCE_RIP();
9813 IEM_MC_END();
9814 }
9815 else
9816 {
9817 /* memory target */
9818 uint32_t fAccess;
9819 if (pImpl->pfnLockedU8)
9820 fAccess = IEM_ACCESS_DATA_RW;
9821 else /* CMP */
9822 fAccess = IEM_ACCESS_DATA_R;
9823 IEM_MC_BEGIN(3, 2);
9824 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9825 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9826 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9827
9828 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9829 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9830 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
9831 if (pImpl->pfnLockedU8)
9832 IEMOP_HLP_DONE_DECODING();
9833 else
9834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9835
9836 IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9837 IEM_MC_FETCH_EFLAGS(EFlags);
9838 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9839 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
9840 else
9841 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
9842
9843 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
9844 IEM_MC_COMMIT_EFLAGS(EFlags);
9845 IEM_MC_ADVANCE_RIP();
9846 IEM_MC_END();
9847 }
9848 return VINF_SUCCESS;
9849}
9850
9851
9852/** Opcode 0x81. */
9853FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
9854{
9855 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9856 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9857 {
9858 case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
9859 case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
9860 case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
9861 case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
9862 case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
9863 case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
9864 case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
9865 case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
9866 }
9867 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
9868
9869 switch (pVCpu->iem.s.enmEffOpSize)
9870 {
9871 case IEMMODE_16BIT:
9872 {
9873 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9874 {
9875 /* register target */
9876 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9878 IEM_MC_BEGIN(3, 0);
9879 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9880 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
9881 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9882
9883 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9884 IEM_MC_REF_EFLAGS(pEFlags);
9885 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9886
9887 IEM_MC_ADVANCE_RIP();
9888 IEM_MC_END();
9889 }
9890 else
9891 {
9892 /* memory target */
9893 uint32_t fAccess;
9894 if (pImpl->pfnLockedU16)
9895 fAccess = IEM_ACCESS_DATA_RW;
9896 else /* CMP, TEST */
9897 fAccess = IEM_ACCESS_DATA_R;
9898 IEM_MC_BEGIN(3, 2);
9899 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9900 IEM_MC_ARG(uint16_t, u16Src, 1);
9901 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9902 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9903
9904 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
9905 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9906 IEM_MC_ASSIGN(u16Src, u16Imm);
9907 if (pImpl->pfnLockedU16)
9908 IEMOP_HLP_DONE_DECODING();
9909 else
9910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9911 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9912 IEM_MC_FETCH_EFLAGS(EFlags);
9913 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9914 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9915 else
9916 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
9917
9918 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
9919 IEM_MC_COMMIT_EFLAGS(EFlags);
9920 IEM_MC_ADVANCE_RIP();
9921 IEM_MC_END();
9922 }
9923 break;
9924 }
9925
9926 case IEMMODE_32BIT:
9927 {
9928 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9929 {
9930 /* register target */
9931 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9933 IEM_MC_BEGIN(3, 0);
9934 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9935 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
9936 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9937
9938 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9939 IEM_MC_REF_EFLAGS(pEFlags);
9940 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9941 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9942
9943 IEM_MC_ADVANCE_RIP();
9944 IEM_MC_END();
9945 }
9946 else
9947 {
9948 /* memory target */
9949 uint32_t fAccess;
9950 if (pImpl->pfnLockedU32)
9951 fAccess = IEM_ACCESS_DATA_RW;
9952 else /* CMP, TEST */
9953 fAccess = IEM_ACCESS_DATA_R;
9954 IEM_MC_BEGIN(3, 2);
9955 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9956 IEM_MC_ARG(uint32_t, u32Src, 1);
9957 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9958 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9959
9960 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9961 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9962 IEM_MC_ASSIGN(u32Src, u32Imm);
9963 if (pImpl->pfnLockedU32)
9964 IEMOP_HLP_DONE_DECODING();
9965 else
9966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9967 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9968 IEM_MC_FETCH_EFLAGS(EFlags);
9969 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9970 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9971 else
9972 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
9973
9974 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
9975 IEM_MC_COMMIT_EFLAGS(EFlags);
9976 IEM_MC_ADVANCE_RIP();
9977 IEM_MC_END();
9978 }
9979 break;
9980 }
9981
9982 case IEMMODE_64BIT:
9983 {
9984 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9985 {
9986 /* register target */
9987 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9989 IEM_MC_BEGIN(3, 0);
9990 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9991 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
9992 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9993
9994 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9995 IEM_MC_REF_EFLAGS(pEFlags);
9996 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9997
9998 IEM_MC_ADVANCE_RIP();
9999 IEM_MC_END();
10000 }
10001 else
10002 {
10003 /* memory target */
10004 uint32_t fAccess;
10005 if (pImpl->pfnLockedU64)
10006 fAccess = IEM_ACCESS_DATA_RW;
10007 else /* CMP */
10008 fAccess = IEM_ACCESS_DATA_R;
10009 IEM_MC_BEGIN(3, 2);
10010 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10011 IEM_MC_ARG(uint64_t, u64Src, 1);
10012 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10014
10015 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
10016 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10017 if (pImpl->pfnLockedU64)
10018 IEMOP_HLP_DONE_DECODING();
10019 else
10020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10021 IEM_MC_ASSIGN(u64Src, u64Imm);
10022 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10023 IEM_MC_FETCH_EFLAGS(EFlags);
10024 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10025 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10026 else
10027 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
10028
10029 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
10030 IEM_MC_COMMIT_EFLAGS(EFlags);
10031 IEM_MC_ADVANCE_RIP();
10032 IEM_MC_END();
10033 }
10034 break;
10035 }
10036 }
10037 return VINF_SUCCESS;
10038}
10039
10040
10041/** Opcode 0x82. */
10042FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
10043{
10044 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
10045 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
10046}
10047
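/* Illustrative note (not from the original source): opcode 0x82 is an
 * undocumented alias of 0x80 on 16/32-bit capable CPUs and is invalid in
 * 64-bit mode, hence IEMOP_HLP_NO_64BIT before re-dispatching above. */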
10048
10049/** Opcode 0x83. */
10050FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
10051{
10052 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10053 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10054 {
10055 case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
10056 case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
10057 case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
10058 case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
10059 case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
10060 case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
10061 case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
10062 case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
10063 }
10064 /* Note! The OR, AND and XOR forms of this instruction seem to be present
10065 on CPUs prior to the 386 even though they are absent from the Intel
10066 reference manuals and some 3rd party opcode listings. */
10067 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
10068
10069 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10070 {
10071 /*
10072 * Register target
10073 */
10074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10075 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10076 switch (pVCpu->iem.s.enmEffOpSize)
10077 {
10078 case IEMMODE_16BIT:
10079 {
10080 IEM_MC_BEGIN(3, 0);
10081 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10082 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm, 1);
10083 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10084
10085 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10086 IEM_MC_REF_EFLAGS(pEFlags);
10087 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10088
10089 IEM_MC_ADVANCE_RIP();
10090 IEM_MC_END();
10091 break;
10092 }
10093
10094 case IEMMODE_32BIT:
10095 {
10096 IEM_MC_BEGIN(3, 0);
10097 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10098 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm, 1);
10099 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10100
10101 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10102 IEM_MC_REF_EFLAGS(pEFlags);
10103 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10104 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10105
10106 IEM_MC_ADVANCE_RIP();
10107 IEM_MC_END();
10108 break;
10109 }
10110
10111 case IEMMODE_64BIT:
10112 {
10113 IEM_MC_BEGIN(3, 0);
10114 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10115 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm, 1);
10116 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10117
10118 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10119 IEM_MC_REF_EFLAGS(pEFlags);
10120 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10121
10122 IEM_MC_ADVANCE_RIP();
10123 IEM_MC_END();
10124 break;
10125 }
10126 }
10127 }
10128 else
10129 {
10130 /*
10131 * Memory target.
10132 */
10133 uint32_t fAccess;
10134 if (pImpl->pfnLockedU16)
10135 fAccess = IEM_ACCESS_DATA_RW;
10136 else /* CMP */
10137 fAccess = IEM_ACCESS_DATA_R;
10138
10139 switch (pVCpu->iem.s.enmEffOpSize)
10140 {
10141 case IEMMODE_16BIT:
10142 {
10143 IEM_MC_BEGIN(3, 2);
10144 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10145 IEM_MC_ARG(uint16_t, u16Src, 1);
10146 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10147 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10148
10149 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10150 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10151 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
10152 if (pImpl->pfnLockedU16)
10153 IEMOP_HLP_DONE_DECODING();
10154 else
10155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10156 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10157 IEM_MC_FETCH_EFLAGS(EFlags);
10158 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10159 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10160 else
10161 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
10162
10163 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
10164 IEM_MC_COMMIT_EFLAGS(EFlags);
10165 IEM_MC_ADVANCE_RIP();
10166 IEM_MC_END();
10167 break;
10168 }
10169
10170 case IEMMODE_32BIT:
10171 {
10172 IEM_MC_BEGIN(3, 2);
10173 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10174 IEM_MC_ARG(uint32_t, u32Src, 1);
10175 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10176 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10177
10178 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10179 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10180 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
10181 if (pImpl->pfnLockedU32)
10182 IEMOP_HLP_DONE_DECODING();
10183 else
10184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10185 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10186 IEM_MC_FETCH_EFLAGS(EFlags);
10187 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10188 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10189 else
10190 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
10191
10192 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
10193 IEM_MC_COMMIT_EFLAGS(EFlags);
10194 IEM_MC_ADVANCE_RIP();
10195 IEM_MC_END();
10196 break;
10197 }
10198
10199 case IEMMODE_64BIT:
10200 {
10201 IEM_MC_BEGIN(3, 2);
10202 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10203 IEM_MC_ARG(uint64_t, u64Src, 1);
10204 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10205 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10206
10207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10208 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10209 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
10210 if (pImpl->pfnLockedU64)
10211 IEMOP_HLP_DONE_DECODING();
10212 else
10213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10214 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10215 IEM_MC_FETCH_EFLAGS(EFlags);
10216 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10217 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10218 else
10219 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
10220
10221 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
10222 IEM_MC_COMMIT_EFLAGS(EFlags);
10223 IEM_MC_ADVANCE_RIP();
10224 IEM_MC_END();
10225 break;
10226 }
10227 }
10228 }
10229 return VINF_SUCCESS;
10230}
10231
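/* Illustrative example (not from the original source): the 0x83 group
 * takes a sign-extended byte immediate, which is why assemblers prefer it
 * for small constants:
 *     add rsp, 8          ; 48 83 C4 08
 *     cmp eax, -1         ; 83 F8 FF
 * The (int8_t)u8Imm casts above perform exactly that extension. */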
10232
10233/** Opcode 0x84. */
10234FNIEMOP_DEF(iemOp_test_Eb_Gb)
10235{
10236 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
10237 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
10238 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
10239}
10240
10241
10242/** Opcode 0x85. */
10243FNIEMOP_DEF(iemOp_test_Ev_Gv)
10244{
10245 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
10246 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
10247 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
10248}
10249
10250
10251/** Opcode 0x86. */
10252FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
10253{
10254 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10255 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
10256
10257 /*
10258 * If rm is denoting a register, no more instruction bytes.
10259 */
10260 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10261 {
10262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10263
10264 IEM_MC_BEGIN(0, 2);
10265 IEM_MC_LOCAL(uint8_t, uTmp1);
10266 IEM_MC_LOCAL(uint8_t, uTmp2);
10267
10268 IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10269 IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10270 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
10271 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
10272
10273 IEM_MC_ADVANCE_RIP();
10274 IEM_MC_END();
10275 }
10276 else
10277 {
10278 /*
10279 * We're accessing memory.
10280 */
10281/** @todo the register must be committed separately! */
10282 IEM_MC_BEGIN(2, 2);
10283 IEM_MC_ARG(uint8_t *, pu8Mem, 0);
10284 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
10285 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10286
10287 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10288 IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10289 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10290 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
10291 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);
10292
10293 IEM_MC_ADVANCE_RIP();
10294 IEM_MC_END();
10295 }
10296 return VINF_SUCCESS;
10297}
10298
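/* Note (illustrative, not from the original source): on real hardware
 * XCHG with a memory operand is implicitly locked whether or not a LOCK
 * prefix is present; the mapping above only models the data access itself
 * (see also the @todo about committing the register separately). */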
10299
10300/** Opcode 0x87. */
10301FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
10302{
10303 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
10304 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10305
10306 /*
10307 * If rm is denoting a register, no more instruction bytes.
10308 */
10309 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10310 {
10311 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10312
10313 switch (pVCpu->iem.s.enmEffOpSize)
10314 {
10315 case IEMMODE_16BIT:
10316 IEM_MC_BEGIN(0, 2);
10317 IEM_MC_LOCAL(uint16_t, uTmp1);
10318 IEM_MC_LOCAL(uint16_t, uTmp2);
10319
10320 IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10321 IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10322 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
10323 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
10324
10325 IEM_MC_ADVANCE_RIP();
10326 IEM_MC_END();
10327 return VINF_SUCCESS;
10328
10329 case IEMMODE_32BIT:
10330 IEM_MC_BEGIN(0, 2);
10331 IEM_MC_LOCAL(uint32_t, uTmp1);
10332 IEM_MC_LOCAL(uint32_t, uTmp2);
10333
10334 IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10335 IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10336 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
10337 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
10338
10339 IEM_MC_ADVANCE_RIP();
10340 IEM_MC_END();
10341 return VINF_SUCCESS;
10342
10343 case IEMMODE_64BIT:
10344 IEM_MC_BEGIN(0, 2);
10345 IEM_MC_LOCAL(uint64_t, uTmp1);
10346 IEM_MC_LOCAL(uint64_t, uTmp2);
10347
10348 IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10349 IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10350 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
10351 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
10352
10353 IEM_MC_ADVANCE_RIP();
10354 IEM_MC_END();
10355 return VINF_SUCCESS;
10356
10357 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10358 }
10359 }
10360 else
10361 {
10362 /*
10363 * We're accessing memory.
10364 */
10365 switch (pVCpu->iem.s.enmEffOpSize)
10366 {
10367/** @todo the register must be committed separately! */
10368 case IEMMODE_16BIT:
10369 IEM_MC_BEGIN(2, 2);
10370 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
10371 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
10372 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10373
10374 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10375 IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10376 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10377 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
10378 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
10379
10380 IEM_MC_ADVANCE_RIP();
10381 IEM_MC_END();
10382 return VINF_SUCCESS;
10383
10384 case IEMMODE_32BIT:
10385 IEM_MC_BEGIN(2, 2);
10386 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
10387 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
10388 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10389
10390 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10391 IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10392 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10393 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
10394 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
10395
10396 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
10397 IEM_MC_ADVANCE_RIP();
10398 IEM_MC_END();
10399 return VINF_SUCCESS;
10400
10401 case IEMMODE_64BIT:
10402 IEM_MC_BEGIN(2, 2);
10403 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
10404 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
10405 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10406
10407 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10408 IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10409 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10410 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
10411 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
10412
10413 IEM_MC_ADVANCE_RIP();
10414 IEM_MC_END();
10415 return VINF_SUCCESS;
10416
10417 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10418 }
10419 }
10420}
10421
10422
10423/** Opcode 0x88. */
10424FNIEMOP_DEF(iemOp_mov_Eb_Gb)
10425{
10426 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
10427
10428 uint8_t bRm;
10429 IEM_OPCODE_GET_NEXT_U8(&bRm);
10430
10431 /*
10432 * If rm is denoting a register, no more instruction bytes.
10433 */
10434 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10435 {
10436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10437 IEM_MC_BEGIN(0, 1);
10438 IEM_MC_LOCAL(uint8_t, u8Value);
10439 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10440 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
10441 IEM_MC_ADVANCE_RIP();
10442 IEM_MC_END();
10443 }
10444 else
10445 {
10446 /*
10447 * We're writing a register to memory.
10448 */
10449 IEM_MC_BEGIN(0, 2);
10450 IEM_MC_LOCAL(uint8_t, u8Value);
10451 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10452 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10453 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10454 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10455 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
10456 IEM_MC_ADVANCE_RIP();
10457 IEM_MC_END();
10458 }
10459 return VINF_SUCCESS;
10460
10461}
10462
10463
10464/** Opcode 0x89. */
10465FNIEMOP_DEF(iemOp_mov_Ev_Gv)
10466{
10467 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
10468
10469 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10470
10471 /*
10472 * If rm is denoting a register, no more instruction bytes.
10473 */
10474 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10475 {
10476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10477 switch (pVCpu->iem.s.enmEffOpSize)
10478 {
10479 case IEMMODE_16BIT:
10480 IEM_MC_BEGIN(0, 1);
10481 IEM_MC_LOCAL(uint16_t, u16Value);
10482 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10483 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
10484 IEM_MC_ADVANCE_RIP();
10485 IEM_MC_END();
10486 break;
10487
10488 case IEMMODE_32BIT:
10489 IEM_MC_BEGIN(0, 1);
10490 IEM_MC_LOCAL(uint32_t, u32Value);
10491 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10492 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
10493 IEM_MC_ADVANCE_RIP();
10494 IEM_MC_END();
10495 break;
10496
10497 case IEMMODE_64BIT:
10498 IEM_MC_BEGIN(0, 1);
10499 IEM_MC_LOCAL(uint64_t, u64Value);
10500 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10501 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
10502 IEM_MC_ADVANCE_RIP();
10503 IEM_MC_END();
10504 break;
10505 }
10506 }
10507 else
10508 {
10509 /*
10510 * We're writing a register to memory.
10511 */
10512 switch (pVCpu->iem.s.enmEffOpSize)
10513 {
10514 case IEMMODE_16BIT:
10515 IEM_MC_BEGIN(0, 2);
10516 IEM_MC_LOCAL(uint16_t, u16Value);
10517 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10518 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10520 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10521 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
10522 IEM_MC_ADVANCE_RIP();
10523 IEM_MC_END();
10524 break;
10525
10526 case IEMMODE_32BIT:
10527 IEM_MC_BEGIN(0, 2);
10528 IEM_MC_LOCAL(uint32_t, u32Value);
10529 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10530 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10532 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10533 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
10534 IEM_MC_ADVANCE_RIP();
10535 IEM_MC_END();
10536 break;
10537
10538 case IEMMODE_64BIT:
10539 IEM_MC_BEGIN(0, 2);
10540 IEM_MC_LOCAL(uint64_t, u64Value);
10541 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10542 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10544 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10545 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
10546 IEM_MC_ADVANCE_RIP();
10547 IEM_MC_END();
10548 break;
10549 }
10550 }
10551 return VINF_SUCCESS;
10552}
10553
10554
10555/** Opcode 0x8a. */
10556FNIEMOP_DEF(iemOp_mov_Gb_Eb)
10557{
10558 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
10559
10560 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10561
10562 /*
10563 * If rm is denoting a register, no more instruction bytes.
10564 */
10565 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10566 {
10567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10568 IEM_MC_BEGIN(0, 1);
10569 IEM_MC_LOCAL(uint8_t, u8Value);
10570 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10571 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
10572 IEM_MC_ADVANCE_RIP();
10573 IEM_MC_END();
10574 }
10575 else
10576 {
10577 /*
10578 * We're loading a register from memory.
10579 */
10580 IEM_MC_BEGIN(0, 2);
10581 IEM_MC_LOCAL(uint8_t, u8Value);
10582 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10583 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10585 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10586 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
10587 IEM_MC_ADVANCE_RIP();
10588 IEM_MC_END();
10589 }
10590 return VINF_SUCCESS;
10591}
10592
10593
10594/** Opcode 0x8b. */
10595FNIEMOP_DEF(iemOp_mov_Gv_Ev)
10596{
10597 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
10598
10599 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10600
10601 /*
10602 * If rm is denoting a register, no more instruction bytes.
10603 */
10604 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10605 {
10606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10607 switch (pVCpu->iem.s.enmEffOpSize)
10608 {
10609 case IEMMODE_16BIT:
10610 IEM_MC_BEGIN(0, 1);
10611 IEM_MC_LOCAL(uint16_t, u16Value);
10612 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10613 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
10614 IEM_MC_ADVANCE_RIP();
10615 IEM_MC_END();
10616 break;
10617
10618 case IEMMODE_32BIT:
10619 IEM_MC_BEGIN(0, 1);
10620 IEM_MC_LOCAL(uint32_t, u32Value);
10621 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10622 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
10623 IEM_MC_ADVANCE_RIP();
10624 IEM_MC_END();
10625 break;
10626
10627 case IEMMODE_64BIT:
10628 IEM_MC_BEGIN(0, 1);
10629 IEM_MC_LOCAL(uint64_t, u64Value);
10630 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10631 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
10632 IEM_MC_ADVANCE_RIP();
10633 IEM_MC_END();
10634 break;
10635 }
10636 }
10637 else
10638 {
10639 /*
10640 * We're loading a register from memory.
10641 */
10642 switch (pVCpu->iem.s.enmEffOpSize)
10643 {
10644 case IEMMODE_16BIT:
10645 IEM_MC_BEGIN(0, 2);
10646 IEM_MC_LOCAL(uint16_t, u16Value);
10647 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10648 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10650 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10651 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
10652 IEM_MC_ADVANCE_RIP();
10653 IEM_MC_END();
10654 break;
10655
10656 case IEMMODE_32BIT:
10657 IEM_MC_BEGIN(0, 2);
10658 IEM_MC_LOCAL(uint32_t, u32Value);
10659 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10660 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10662 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10663 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
10664 IEM_MC_ADVANCE_RIP();
10665 IEM_MC_END();
10666 break;
10667
10668 case IEMMODE_64BIT:
10669 IEM_MC_BEGIN(0, 2);
10670 IEM_MC_LOCAL(uint64_t, u64Value);
10671 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10672 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10674 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10675 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
10676 IEM_MC_ADVANCE_RIP();
10677 IEM_MC_END();
10678 break;
10679 }
10680 }
10681 return VINF_SUCCESS;
10682}
10683
10684
10685/** Opcode 0x63. */
10686FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
10687{
10688 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
10689 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
10690 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10691 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
10692 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
10693}
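
/* A minimal stand-alone sketch of the three-way 0x63 dispatch above,
 * assuming the caller already knows the CPU mode and effective operand
 * size; hypothetical helper, not part of IEM. */
#if 0 /* illustrative only */
static const char *classifyOpcode63(IEMMODE enmCpuMode, IEMMODE enmEffOpSize)
{
    if (enmCpuMode != IEMMODE_64BIT)
        return "arpl Ew,Gw";    /* legacy and compatibility modes */
    if (enmEffOpSize != IEMMODE_64BIT)
        return "mov Gv,Ev";     /* 64-bit mode without REX.W */
    return "movsxd Gv,Ev";      /* 64-bit mode with REX.W */
}
#endif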
10694
10695
10696/** Opcode 0x8c. */
10697FNIEMOP_DEF(iemOp_mov_Ev_Sw)
10698{
10699 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
10700
10701 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10702
10703 /*
10704 * Check that the destination register exists. The REX.R prefix is ignored.
10705 */
10706 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
10707 if ( iSegReg > X86_SREG_GS)
10708 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10709
10710 /*
10711 * If rm is denoting a register, no more instruction bytes.
10712 * In that case, the operand size is respected and the upper bits are
10713 * cleared (starting with some Pentium models).
10714 */
10715 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10716 {
10717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10718 switch (pVCpu->iem.s.enmEffOpSize)
10719 {
10720 case IEMMODE_16BIT:
10721 IEM_MC_BEGIN(0, 1);
10722 IEM_MC_LOCAL(uint16_t, u16Value);
10723 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
10724 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
10725 IEM_MC_ADVANCE_RIP();
10726 IEM_MC_END();
10727 break;
10728
10729 case IEMMODE_32BIT:
10730 IEM_MC_BEGIN(0, 1);
10731 IEM_MC_LOCAL(uint32_t, u32Value);
10732 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
10733 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
10734 IEM_MC_ADVANCE_RIP();
10735 IEM_MC_END();
10736 break;
10737
10738 case IEMMODE_64BIT:
10739 IEM_MC_BEGIN(0, 1);
10740 IEM_MC_LOCAL(uint64_t, u64Value);
10741 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
10742 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
10743 IEM_MC_ADVANCE_RIP();
10744 IEM_MC_END();
10745 break;
10746 }
10747 }
10748 else
10749 {
10750 /*
10751 * We're saving the register to memory. The access is word sized
10752 * regardless of operand size prefixes.
10753 */
10754#if 0 /* not necessary */
10755 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
10756#endif
10757 IEM_MC_BEGIN(0, 2);
10758 IEM_MC_LOCAL(uint16_t, u16Value);
10759 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10760 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10762 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
10763 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
10764 IEM_MC_ADVANCE_RIP();
10765 IEM_MC_END();
10766 }
10767 return VINF_SUCCESS;
10768}
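
/* A minimal sketch of the register-form widening above, assuming a
 * hypothetical flat model: the 16-bit selector is zero-extended to the
 * effective operand size, while the 16-bit case leaves the upper
 * destination bits alone. Not part of IEM. */
#if 0 /* illustrative only */
static uint64_t movFromSregModel(uint64_t uOldDst, uint16_t uSel, IEMMODE enmEffOpSize)
{
    switch (enmEffOpSize)
    {
        case IEMMODE_16BIT: return (uOldDst & ~(uint64_t)0xffff) | uSel; /* low word only */
        case IEMMODE_32BIT: return (uint32_t)uSel; /* zero-extended, clears bits 32-63 */
        default:            return uSel;           /* zero-extended to 64 bits */
    }
}
#endif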
10769
10770
10771
10772
10773/** Opcode 0x8d. */
10774FNIEMOP_DEF(iemOp_lea_Gv_M)
10775{
10776 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
10777 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10778 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10779 return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */
10780
10781 switch (pVCpu->iem.s.enmEffOpSize)
10782 {
10783 case IEMMODE_16BIT:
10784 IEM_MC_BEGIN(0, 2);
10785 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10786 IEM_MC_LOCAL(uint16_t, u16Cast);
10787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10789 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
10790 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
10791 IEM_MC_ADVANCE_RIP();
10792 IEM_MC_END();
10793 return VINF_SUCCESS;
10794
10795 case IEMMODE_32BIT:
10796 IEM_MC_BEGIN(0, 2);
10797 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10798 IEM_MC_LOCAL(uint32_t, u32Cast);
10799 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10801 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
10802 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
10803 IEM_MC_ADVANCE_RIP();
10804 IEM_MC_END();
10805 return VINF_SUCCESS;
10806
10807 case IEMMODE_64BIT:
10808 IEM_MC_BEGIN(0, 1);
10809 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10810 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10812 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
10813 IEM_MC_ADVANCE_RIP();
10814 IEM_MC_END();
10815 return VINF_SUCCESS;
10816 }
10817 AssertFailedReturn(VERR_IEM_IPE_7);
10818}
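
/* A minimal sketch of the LEA value handling above: the computed
 * effective address is truncated to the operand size and no memory is
 * accessed. Hypothetical model, not part of IEM. */
#if 0 /* illustrative only */
static uint64_t leaModel(uint64_t uGCPtrEff, IEMMODE enmEffOpSize)
{
    switch (enmEffOpSize)
    {
        case IEMMODE_16BIT: return (uint16_t)uGCPtrEff; /* the IEM_MC_ASSIGN_TO_SMALLER cast */
        case IEMMODE_32BIT: return (uint32_t)uGCPtrEff;
        default:            return uGCPtrEff;
    }
}
#endif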
10819
10820
10821/** Opcode 0x8e. */
10822FNIEMOP_DEF(iemOp_mov_Sw_Ev)
10823{
10824 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
10825
10826 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10827
10828 /*
10829 * The practical operand size is 16-bit.
10830 */
10831#if 0 /* not necessary */
10832 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
10833#endif
10834
10835 /*
10836 * Check that the destination register exists and can be used with this
10837 * instruction. The REX.R prefix is ignored.
10838 */
10839 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
10840 if ( iSegReg == X86_SREG_CS
10841 || iSegReg > X86_SREG_GS)
10842 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10843
10844 /*
10845 * If rm is denoting a register, no more instruction bytes.
10846 */
10847 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10848 {
10849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10850 IEM_MC_BEGIN(2, 0);
10851 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
10852 IEM_MC_ARG(uint16_t, u16Value, 1);
10853 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10854 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
10855 IEM_MC_END();
10856 }
10857 else
10858 {
10859 /*
10860 * We're loading the register from memory. The access is word sized
10861 * regardless of operand size prefixes.
10862 */
10863 IEM_MC_BEGIN(2, 1);
10864 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
10865 IEM_MC_ARG(uint16_t, u16Value, 1);
10866 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10867 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10869 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10870 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
10871 IEM_MC_END();
10872 }
10873 return VINF_SUCCESS;
10874}
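
/* A minimal sketch of the decode-time check above, assuming only the
 * ModRM reg field matters; hypothetical helper, not part of IEM. */
#if 0 /* illustrative only */
static bool isValidMovToSregTarget(uint8_t iSegReg)
{
    /* 'mov Sw,Ev' may not load CS, and the reg field must name one of
       the six architectural segment registers (ES thru GS). */
    return iSegReg != X86_SREG_CS
        && iSegReg <= X86_SREG_GS;
}
#endif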
10875
10876
10877/** Opcode 0x8f /0. */
10878FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
10879{
10880 /* This bugger is rather annoying as it requires rSP to be updated before
10881 doing the effective address calculations. Will eventually require a
10882 split between the R/M+SIB decoding and the effective address
10883 calculation - which is something that is required for any attempt at
10884 reusing this code for a recompiler. It may also be good to have if we
10885 need to delay #UD exception caused by invalid lock prefixes.
10886
10887 For now, we'll do a mostly safe interpreter-only implementation here. */
10888 /** @todo What's the deal with the 'reg' field and pop Ev? Ignoring it for
10889 * now until tests show it's checked. */
10890 IEMOP_MNEMONIC(pop_Ev, "pop Ev");
10891
10892 /* Register access is relatively easy and can share code. */
10893 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10894 return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10895
10896 /*
10897 * Memory target.
10898 *
10899 * Intel says that RSP is incremented before it's used in any effective
10900 * address calculations. This means some serious extra annoyance here since
10901 * we decode and calculate the effective address in one step and like to
10902 * delay committing registers till everything is done.
10903 *
10904 * So, we'll decode and calculate the effective address twice. This will
10905 * require some recoding if turned into a recompiler.
10906 */
10907 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */
10908
10909#ifndef TST_IEM_CHECK_MC
10910 /* Calc effective address with modified ESP. */
10911/** @todo testcase */
10912 PCPUMCTX pCtx = IEM_GET_CTX(pVCpu);
10913 RTGCPTR GCPtrEff;
10914 VBOXSTRICTRC rcStrict;
10915 switch (pVCpu->iem.s.enmEffOpSize)
10916 {
10917 case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
10918 case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
10919 case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
10920 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10921 }
10922 if (rcStrict != VINF_SUCCESS)
10923 return rcStrict;
10924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10925
10926 /* Perform the operation - this should be CImpl. */
10927 RTUINT64U TmpRsp;
10928 TmpRsp.u = pCtx->rsp;
10929 switch (pVCpu->iem.s.enmEffOpSize)
10930 {
10931 case IEMMODE_16BIT:
10932 {
10933 uint16_t u16Value;
10934 rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
10935 if (rcStrict == VINF_SUCCESS)
10936 rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
10937 break;
10938 }
10939
10940 case IEMMODE_32BIT:
10941 {
10942 uint32_t u32Value;
10943 rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
10944 if (rcStrict == VINF_SUCCESS)
10945 rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
10946 break;
10947 }
10948
10949 case IEMMODE_64BIT:
10950 {
10951 uint64_t u64Value;
10952 rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
10953 if (rcStrict == VINF_SUCCESS)
10954 rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
10955 break;
10956 }
10957
10958 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10959 }
10960 if (rcStrict == VINF_SUCCESS)
10961 {
10962 pCtx->rsp = TmpRsp.u;
10963 iemRegUpdateRipAndClearRF(pVCpu);
10964 }
10965 return rcStrict;
10966
10967#else
10968 return VERR_IEM_IPE_2;
10969#endif
10970}
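
/* A minimal flat-memory sketch of the RSP ordering rule described above:
 * the pop-sized increment happens before the destination address is
 * formed, so 'pop [rsp+disp]' is relative to the post-pop stack pointer.
 * Hypothetical helper names, not part of IEM. */
#if 0 /* illustrative only */
static void popQwordToRspRelative(uint8_t *pbMem, uint64_t *puRsp, int64_t offDisp)
{
    uint64_t uValue;
    memcpy(&uValue, &pbMem[*puRsp], sizeof(uValue));            /* read at the old RSP */
    *puRsp += sizeof(uint64_t);                                 /* pop first ... */
    memcpy(&pbMem[*puRsp + offDisp], &uValue, sizeof(uValue));  /* ... then store at [rsp+disp] */
}
#endif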
10971
10972
10973/** Opcode 0x8f. */
10974FNIEMOP_DEF(iemOp_Grp1A)
10975{
10976 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10977 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
10978 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
10979
10980 /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
10981 /** @todo XOP decoding. */
10982 IEMOP_MNEMONIC(xop_amd, "3-byte-xop");
10983 return IEMOP_RAISE_INVALID_OPCODE();
10984}
10985
10986
10987/**
10988 * Common 'xchg reg,rAX' helper.
10989 */
10990FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
10991{
10992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10993
10994 iReg |= pVCpu->iem.s.uRexB;
10995 switch (pVCpu->iem.s.enmEffOpSize)
10996 {
10997 case IEMMODE_16BIT:
10998 IEM_MC_BEGIN(0, 2);
10999 IEM_MC_LOCAL(uint16_t, u16Tmp1);
11000 IEM_MC_LOCAL(uint16_t, u16Tmp2);
11001 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
11002 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
11003 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
11004 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
11005 IEM_MC_ADVANCE_RIP();
11006 IEM_MC_END();
11007 return VINF_SUCCESS;
11008
11009 case IEMMODE_32BIT:
11010 IEM_MC_BEGIN(0, 2);
11011 IEM_MC_LOCAL(uint32_t, u32Tmp1);
11012 IEM_MC_LOCAL(uint32_t, u32Tmp2);
11013 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
11014 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
11015 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
11016 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
11017 IEM_MC_ADVANCE_RIP();
11018 IEM_MC_END();
11019 return VINF_SUCCESS;
11020
11021 case IEMMODE_64BIT:
11022 IEM_MC_BEGIN(0, 2);
11023 IEM_MC_LOCAL(uint64_t, u64Tmp1);
11024 IEM_MC_LOCAL(uint64_t, u64Tmp2);
11025 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
11026 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
11027 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
11028 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
11029 IEM_MC_ADVANCE_RIP();
11030 IEM_MC_END();
11031 return VINF_SUCCESS;
11032
11033 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11034 }
11035}
11036
11037
11038/** Opcode 0x90. */
11039FNIEMOP_DEF(iemOp_nop)
11040{
11041 /* R8/R8D and RAX/EAX can be exchanged. */
11042 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
11043 {
11044 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
11045 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
11046 }
11047
11048 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
11049 IEMOP_MNEMONIC(pause, "pause");
11050 else
11051 IEMOP_MNEMONIC(nop, "nop");
11052 IEM_MC_BEGIN(0, 0);
11053 IEM_MC_ADVANCE_RIP();
11054 IEM_MC_END();
11055 return VINF_SUCCESS;
11056}
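
/* A minimal sketch of the 0x90 disambiguation above; hypothetical
 * helper, not part of IEM. */
#if 0 /* illustrative only */
static const char *classifyOpcode90(bool fRexB, bool fRepz)
{
    if (fRexB)
        return "xchg r8,rAX";   /* REX.B redirects register 0 to R8 */
    if (fRepz)
        return "pause";         /* F3 90 */
    return "nop";
}
#endif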
11057
11058
11059/** Opcode 0x91. */
11060FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
11061{
11062 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
11063 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
11064}
11065
11066
11067/** Opcode 0x92. */
11068FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
11069{
11070 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
11071 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
11072}
11073
11074
11075/** Opcode 0x93. */
11076FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
11077{
11078 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
11079 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
11080}
11081
11082
11083/** Opcode 0x94. */
11084FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
11085{
11086 IEMOP_MNEMONIC(xchg_rSP_rAX, "xchg rSP,rAX");
11087 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
11088}
11089
11090
11091/** Opcode 0x95. */
11092FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
11093{
11094 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
11095 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
11096}
11097
11098
11099/** Opcode 0x96. */
11100FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
11101{
11102 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
11103 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
11104}
11105
11106
11107/** Opcode 0x97. */
11108FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
11109{
11110 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
11111 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
11112}
11113
11114
11115/** Opcode 0x98. */
11116FNIEMOP_DEF(iemOp_cbw)
11117{
11118 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11119 switch (pVCpu->iem.s.enmEffOpSize)
11120 {
11121 case IEMMODE_16BIT:
11122 IEMOP_MNEMONIC(cbw, "cbw");
11123 IEM_MC_BEGIN(0, 1);
11124 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
11125 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
11126 } IEM_MC_ELSE() {
11127 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
11128 } IEM_MC_ENDIF();
11129 IEM_MC_ADVANCE_RIP();
11130 IEM_MC_END();
11131 return VINF_SUCCESS;
11132
11133 case IEMMODE_32BIT:
11134 IEMOP_MNEMONIC(cwde, "cwde");
11135 IEM_MC_BEGIN(0, 1);
11136 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
11137 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
11138 } IEM_MC_ELSE() {
11139 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
11140 } IEM_MC_ENDIF();
11141 IEM_MC_ADVANCE_RIP();
11142 IEM_MC_END();
11143 return VINF_SUCCESS;
11144
11145 case IEMMODE_64BIT:
11146 IEMOP_MNEMONIC(cdqe, "cdqe");
11147 IEM_MC_BEGIN(0, 1);
11148 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
11149 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
11150 } IEM_MC_ELSE() {
11151 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
11152 } IEM_MC_ENDIF();
11153 IEM_MC_ADVANCE_RIP();
11154 IEM_MC_END();
11155 return VINF_SUCCESS;
11156
11157 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11158 }
11159}
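
/* The mask-and-or blocks above are equivalent to a plain C sign
 * extension of the lower half of rAX; a minimal stand-alone sketch of
 * the cwde case (cbw and cdqe are the 8->16 and 32->64 variants): */
#if 0 /* illustrative only */
static uint32_t cwdeModel(uint16_t uAx)
{
    return (uint32_t)(int32_t)(int16_t)uAx; /* EAX = sign-extended AX */
}
#endif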
11160
11161
11162/** Opcode 0x99. */
11163FNIEMOP_DEF(iemOp_cwd)
11164{
11165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11166 switch (pVCpu->iem.s.enmEffOpSize)
11167 {
11168 case IEMMODE_16BIT:
11169 IEMOP_MNEMONIC(cwd, "cwd");
11170 IEM_MC_BEGIN(0, 1);
11171 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
11172 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
11173 } IEM_MC_ELSE() {
11174 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
11175 } IEM_MC_ENDIF();
11176 IEM_MC_ADVANCE_RIP();
11177 IEM_MC_END();
11178 return VINF_SUCCESS;
11179
11180 case IEMMODE_32BIT:
11181 IEMOP_MNEMONIC(cdq, "cdq");
11182 IEM_MC_BEGIN(0, 1);
11183 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
11184 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
11185 } IEM_MC_ELSE() {
11186 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
11187 } IEM_MC_ENDIF();
11188 IEM_MC_ADVANCE_RIP();
11189 IEM_MC_END();
11190 return VINF_SUCCESS;
11191
11192 case IEMMODE_64BIT:
11193 IEMOP_MNEMONIC(cqo, "cqo");
11194 IEM_MC_BEGIN(0, 1);
11195 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
11196 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
11197 } IEM_MC_ELSE() {
11198 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
11199 } IEM_MC_ENDIF();
11200 IEM_MC_ADVANCE_RIP();
11201 IEM_MC_END();
11202 return VINF_SUCCESS;
11203
11204 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11205 }
11206}
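
/* A minimal sketch of the sign-fill above: rDX becomes all ones or all
 * zeros depending on the sign bit of rAX at the current operand size;
 * the cdq case shown (cwd and cqo are the 16- and 64-bit variants): */
#if 0 /* illustrative only */
static uint32_t cdqModel(uint32_t uEax)
{
    return (uEax & UINT32_C(0x80000000)) ? UINT32_C(0xffffffff) : 0; /* EDX */
}
#endif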
11207
11208
11209/** Opcode 0x9a. */
11210FNIEMOP_DEF(iemOp_call_Ap)
11211{
11212 IEMOP_MNEMONIC(call_Ap, "call Ap");
11213 IEMOP_HLP_NO_64BIT();
11214
11215 /* Decode the far pointer address and pass it on to the far call C implementation. */
11216 uint32_t offSeg;
11217 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
11218 IEM_OPCODE_GET_NEXT_U32(&offSeg);
11219 else
11220 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
11221 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
11222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11223 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
11224}
11225
11226
11227/** Opcode 0x9b. (aka fwait) */
11228FNIEMOP_DEF(iemOp_wait)
11229{
11230 IEMOP_MNEMONIC(wait, "wait");
11231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11232
11233 IEM_MC_BEGIN(0, 0);
11234 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11235 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11236 IEM_MC_ADVANCE_RIP();
11237 IEM_MC_END();
11238 return VINF_SUCCESS;
11239}
11240
11241
11242/** Opcode 0x9c. */
11243FNIEMOP_DEF(iemOp_pushf_Fv)
11244{
11245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11246 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11247 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
11248}
11249
11250
11251/** Opcode 0x9d. */
11252FNIEMOP_DEF(iemOp_popf_Fv)
11253{
11254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11255 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11256 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
11257}
11258
11259
11260/** Opcode 0x9e. */
11261FNIEMOP_DEF(iemOp_sahf)
11262{
11263 IEMOP_MNEMONIC(sahf, "sahf");
11264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11265 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
11266 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
11267 return IEMOP_RAISE_INVALID_OPCODE();
11268 IEM_MC_BEGIN(0, 2);
11269 IEM_MC_LOCAL(uint32_t, u32Flags);
11270 IEM_MC_LOCAL(uint32_t, EFlags);
11271 IEM_MC_FETCH_EFLAGS(EFlags);
11272 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
11273 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11274 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
11275 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
11276 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
11277 IEM_MC_COMMIT_EFLAGS(EFlags);
11278 IEM_MC_ADVANCE_RIP();
11279 IEM_MC_END();
11280 return VINF_SUCCESS;
11281}
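
/* A minimal sketch of the merge above: SAHF replaces SF, ZF, AF, PF and
 * CF from AH and forces reserved bit 1, leaving the upper EFLAGS bits
 * untouched. Hypothetical helper, not part of IEM. */
#if 0 /* illustrative only */
static uint32_t sahfModel(uint32_t fEFlags, uint8_t bAh)
{
    uint32_t const fMask = X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF;
    return (fEFlags & UINT32_C(0xffffff00)) | (bAh & fMask) | X86_EFL_1;
}
#endif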
11282
11283
11284/** Opcode 0x9f. */
11285FNIEMOP_DEF(iemOp_lahf)
11286{
11287 IEMOP_MNEMONIC(lahf, "lahf");
11288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11289 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
11290 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
11291 return IEMOP_RAISE_INVALID_OPCODE();
11292 IEM_MC_BEGIN(0, 1);
11293 IEM_MC_LOCAL(uint8_t, u8Flags);
11294 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
11295 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
11296 IEM_MC_ADVANCE_RIP();
11297 IEM_MC_END();
11298 return VINF_SUCCESS;
11299}
11300
11301
11302/**
11303 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
11304 * iemOp_mov_Ov_rAX to fetch the moffsXX part of the opcode and fend off lock
11305 * prefixes. Will return on failures.
11306 * @param a_GCPtrMemOff The variable to store the offset in.
11307 */
11308#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
11309 do \
11310 { \
11311 switch (pVCpu->iem.s.enmEffAddrMode) \
11312 { \
11313 case IEMMODE_16BIT: \
11314 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
11315 break; \
11316 case IEMMODE_32BIT: \
11317 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
11318 break; \
11319 case IEMMODE_64BIT: \
11320 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
11321 break; \
11322 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11323 } \
11324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11325 } while (0)
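
/* A minimal sketch of the rule the macro encodes: the moffs displacement
 * width follows the effective *address* size, not the operand size.
 * Hypothetical stand-alone reader assuming a little-endian host, not
 * part of IEM. */
#if 0 /* illustrative only */
static uint64_t fetchMoffsModel(const uint8_t *pbOpcode, IEMMODE enmEffAddrMode)
{
    uint64_t uOff = 0;
    switch (enmEffAddrMode)
    {
        case IEMMODE_16BIT: memcpy(&uOff, pbOpcode, 2); break;
        case IEMMODE_32BIT: memcpy(&uOff, pbOpcode, 4); break;
        default:            memcpy(&uOff, pbOpcode, 8); break;
    }
    return uOff; /* zero-extended */
}
#endif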
11326
11327/** Opcode 0xa0. */
11328FNIEMOP_DEF(iemOp_mov_Al_Ob)
11329{
11330 /*
11331 * Get the offset and fend off lock prefixes.
11332 */
11333 RTGCPTR GCPtrMemOff;
11334 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11335
11336 /*
11337 * Fetch AL.
11338 */
11339 IEM_MC_BEGIN(0,1);
11340 IEM_MC_LOCAL(uint8_t, u8Tmp);
11341 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
11342 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
11343 IEM_MC_ADVANCE_RIP();
11344 IEM_MC_END();
11345 return VINF_SUCCESS;
11346}
11347
11348
11349/** Opcode 0xa1. */
11350FNIEMOP_DEF(iemOp_mov_rAX_Ov)
11351{
11352 /*
11353 * Get the offset and fend off lock prefixes.
11354 */
11355 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
11356 RTGCPTR GCPtrMemOff;
11357 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11358
11359 /*
11360 * Fetch rAX.
11361 */
11362 switch (pVCpu->iem.s.enmEffOpSize)
11363 {
11364 case IEMMODE_16BIT:
11365 IEM_MC_BEGIN(0,1);
11366 IEM_MC_LOCAL(uint16_t, u16Tmp);
11367 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
11368 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
11369 IEM_MC_ADVANCE_RIP();
11370 IEM_MC_END();
11371 return VINF_SUCCESS;
11372
11373 case IEMMODE_32BIT:
11374 IEM_MC_BEGIN(0,1);
11375 IEM_MC_LOCAL(uint32_t, u32Tmp);
11376 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
11377 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
11378 IEM_MC_ADVANCE_RIP();
11379 IEM_MC_END();
11380 return VINF_SUCCESS;
11381
11382 case IEMMODE_64BIT:
11383 IEM_MC_BEGIN(0,1);
11384 IEM_MC_LOCAL(uint64_t, u64Tmp);
11385 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
11386 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
11387 IEM_MC_ADVANCE_RIP();
11388 IEM_MC_END();
11389 return VINF_SUCCESS;
11390
11391 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11392 }
11393}
11394
11395
11396/** Opcode 0xa2. */
11397FNIEMOP_DEF(iemOp_mov_Ob_AL)
11398{
11399 /*
11400 * Get the offset and fend off lock prefixes.
11401 */
11402 RTGCPTR GCPtrMemOff;
11403 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11404
11405 /*
11406 * Store AL.
11407 */
11408 IEM_MC_BEGIN(0,1);
11409 IEM_MC_LOCAL(uint8_t, u8Tmp);
11410 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
11411 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
11412 IEM_MC_ADVANCE_RIP();
11413 IEM_MC_END();
11414 return VINF_SUCCESS;
11415}
11416
11417
11418/** Opcode 0xa3. */
11419FNIEMOP_DEF(iemOp_mov_Ov_rAX)
11420{
11421 /*
11422 * Get the offset and fend off lock prefixes.
11423 */
11424 RTGCPTR GCPtrMemOff;
11425 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11426
11427 /*
11428 * Store rAX.
11429 */
11430 switch (pVCpu->iem.s.enmEffOpSize)
11431 {
11432 case IEMMODE_16BIT:
11433 IEM_MC_BEGIN(0,1);
11434 IEM_MC_LOCAL(uint16_t, u16Tmp);
11435 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
11436 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
11437 IEM_MC_ADVANCE_RIP();
11438 IEM_MC_END();
11439 return VINF_SUCCESS;
11440
11441 case IEMMODE_32BIT:
11442 IEM_MC_BEGIN(0,1);
11443 IEM_MC_LOCAL(uint32_t, u32Tmp);
11444 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
11445 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
11446 IEM_MC_ADVANCE_RIP();
11447 IEM_MC_END();
11448 return VINF_SUCCESS;
11449
11450 case IEMMODE_64BIT:
11451 IEM_MC_BEGIN(0,1);
11452 IEM_MC_LOCAL(uint64_t, u64Tmp);
11453 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
11454 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
11455 IEM_MC_ADVANCE_RIP();
11456 IEM_MC_END();
11457 return VINF_SUCCESS;
11458
11459 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11460 }
11461}
11462
11463/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv */
11464#define IEM_MOVS_CASE(ValBits, AddrBits) \
11465 IEM_MC_BEGIN(0, 2); \
11466 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
11467 IEM_MC_LOCAL(RTGCPTR, uAddr); \
11468 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
11469 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
11470 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
11471 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
11472 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
11473 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11474 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11475 } IEM_MC_ELSE() { \
11476 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11477 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11478 } IEM_MC_ENDIF(); \
11479 IEM_MC_ADVANCE_RIP(); \
11480 IEM_MC_END();
11481
11482/** Opcode 0xa4. */
11483FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
11484{
11485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11486
11487 /*
11488 * Use the C implementation if a repeat prefix is encountered.
11489 */
11490 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11491 {
11492 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
11493 switch (pVCpu->iem.s.enmEffAddrMode)
11494 {
11495 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
11496 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
11497 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
11498 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11499 }
11500 }
11501 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
11502
11503 /*
11504 * Sharing case implementation with movs[wdq] below.
11505 */
11506 switch (pVCpu->iem.s.enmEffAddrMode)
11507 {
11508 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
11509 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
11510 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
11511 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11512 }
11513 return VINF_SUCCESS;
11514}
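
/* A minimal flat-memory sketch of one non-repeated movsb step, matching
 * the IEM_MOVS_CASE expansion above; hypothetical model, not part of
 * IEM. */
#if 0 /* illustrative only */
static void movsbStepModel(uint8_t *pbMem, uint64_t *puSi, uint64_t *puDi, bool fDf)
{
    pbMem[*puDi] = pbMem[*puSi];    /* ES:[rDI] = DS:[rSI] */
    if (fDf)                        /* EFLAGS.DF selects the direction */
    {
        *puSi -= 1;
        *puDi -= 1;
    }
    else
    {
        *puSi += 1;
        *puDi += 1;
    }
}
#endif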
11515
11516
11517/** Opcode 0xa5. */
11518FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
11519{
11520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11521
11522 /*
11523 * Use the C implementation if a repeat prefix is encountered.
11524 */
11525 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11526 {
11527 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
11528 switch (pVCpu->iem.s.enmEffOpSize)
11529 {
11530 case IEMMODE_16BIT:
11531 switch (pVCpu->iem.s.enmEffAddrMode)
11532 {
11533 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
11534 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
11535 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
11536 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11537 }
11538 break;
11539 case IEMMODE_32BIT:
11540 switch (pVCpu->iem.s.enmEffAddrMode)
11541 {
11542 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
11543 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
11544 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
11545 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11546 }
11547 case IEMMODE_64BIT:
11548 switch (pVCpu->iem.s.enmEffAddrMode)
11549 {
11550 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
11551 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
11552 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
11553 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11554 }
11555 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11556 }
11557 }
11558 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
11559
11560 /*
11561 * Annoying double switch here.
11562 * Using ugly macro for implementing the cases, sharing it with movsb.
11563 */
11564 switch (pVCpu->iem.s.enmEffOpSize)
11565 {
11566 case IEMMODE_16BIT:
11567 switch (pVCpu->iem.s.enmEffAddrMode)
11568 {
11569 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
11570 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
11571 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
11572 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11573 }
11574 break;
11575
11576 case IEMMODE_32BIT:
11577 switch (pVCpu->iem.s.enmEffAddrMode)
11578 {
11579 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
11580 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
11581 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
11582 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11583 }
11584 break;
11585
11586 case IEMMODE_64BIT:
11587 switch (pVCpu->iem.s.enmEffAddrMode)
11588 {
11589 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11590 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
11591 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
11592 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11593 }
11594 break;
11595 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11596 }
11597 return VINF_SUCCESS;
11598}
11599
11600#undef IEM_MOVS_CASE
11601
11602/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv */
11603#define IEM_CMPS_CASE(ValBits, AddrBits) \
11604 IEM_MC_BEGIN(3, 3); \
11605 IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
11606 IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
11607 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11608 IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
11609 IEM_MC_LOCAL(RTGCPTR, uAddr); \
11610 \
11611 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
11612 IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
11613 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
11614 IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
11615 IEM_MC_REF_LOCAL(puValue1, uValue1); \
11616 IEM_MC_REF_EFLAGS(pEFlags); \
11617 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
11618 \
11619 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
11620 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11621 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11622 } IEM_MC_ELSE() { \
11623 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11624 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11625 } IEM_MC_ENDIF(); \
11626 IEM_MC_ADVANCE_RIP(); \
11627 IEM_MC_END(); \
11628
11629/** Opcode 0xa6. */
11630FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
11631{
11632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11633
11634 /*
11635 * Use the C implementation if a repeat prefix is encountered.
11636 */
11637 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
11638 {
11639 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
11640 switch (pVCpu->iem.s.enmEffAddrMode)
11641 {
11642 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
11643 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
11644 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
11645 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11646 }
11647 }
11648 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
11649 {
11650 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
11651 switch (pVCpu->iem.s.enmEffAddrMode)
11652 {
11653 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
11654 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
11655 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
11656 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11657 }
11658 }
11659 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
11660
11661 /*
11662 * Sharing case implementation with cmps[wdq] below.
11663 */
11664 switch (pVCpu->iem.s.enmEffAddrMode)
11665 {
11666 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
11667 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
11668 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
11669 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11670 }
11671 return VINF_SUCCESS;
11672
11673}
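
/* A minimal sketch of what distinguishes cmps from movs above: both
 * pointers step the same way, but the two memory operands are only
 * compared (first minus second) to set EFLAGS and nothing is written.
 * Hypothetical model, not part of IEM. */
#if 0 /* illustrative only */
static bool cmpsbEqualModel(const uint8_t *pbMem, uint64_t uSi, uint64_t uDi)
{
    /* repe continues while this is true (ZF=1), repne while false. */
    return pbMem[uSi] == pbMem[uDi];
}
#endif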
11674
11675
11676/** Opcode 0xa7. */
11677FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
11678{
11679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11680
11681 /*
11682 * Use the C implementation if a repeat prefix is encountered.
11683 */
11684 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
11685 {
11686 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
11687 switch (pVCpu->iem.s.enmEffOpSize)
11688 {
11689 case IEMMODE_16BIT:
11690 switch (pVCpu->iem.s.enmEffAddrMode)
11691 {
11692 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
11693 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
11694 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
11695 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11696 }
11697 break;
11698 case IEMMODE_32BIT:
11699 switch (pVCpu->iem.s.enmEffAddrMode)
11700 {
11701 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
11702 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
11703 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
11704 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11705 }
11706 case IEMMODE_64BIT:
11707 switch (pVCpu->iem.s.enmEffAddrMode)
11708 {
11709 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
11710 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
11711 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
11712 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11713 }
11714 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11715 }
11716 }
11717
11718 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
11719 {
11720 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
11721 switch (pVCpu->iem.s.enmEffOpSize)
11722 {
11723 case IEMMODE_16BIT:
11724 switch (pVCpu->iem.s.enmEffAddrMode)
11725 {
11726 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
11727 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
11728 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
11729 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11730 }
11731 break;
11732 case IEMMODE_32BIT:
11733 switch (pVCpu->iem.s.enmEffAddrMode)
11734 {
11735 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
11736 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
11737 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
11738 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11739 }
11740 case IEMMODE_64BIT:
11741 switch (pVCpu->iem.s.enmEffAddrMode)
11742 {
11743 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
11744 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
11745 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
11746 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11747 }
11748 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11749 }
11750 }
11751
11752 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
11753
11754 /*
11755 * Annoying double switch here.
11756 * Using ugly macro for implementing the cases, sharing it with cmpsb.
11757 */
11758 switch (pVCpu->iem.s.enmEffOpSize)
11759 {
11760 case IEMMODE_16BIT:
11761 switch (pVCpu->iem.s.enmEffAddrMode)
11762 {
11763 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
11764 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
11765 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
11766 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11767 }
11768 break;
11769
11770 case IEMMODE_32BIT:
11771 switch (pVCpu->iem.s.enmEffAddrMode)
11772 {
11773 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
11774 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
11775 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
11776 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11777 }
11778 break;
11779
11780 case IEMMODE_64BIT:
11781 switch (pVCpu->iem.s.enmEffAddrMode)
11782 {
11783 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11784 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
11785 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
11786 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11787 }
11788 break;
11789 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11790 }
11791 return VINF_SUCCESS;
11792
11793}
11794
11795#undef IEM_CMPS_CASE
11796
11797/** Opcode 0xa8. */
11798FNIEMOP_DEF(iemOp_test_AL_Ib)
11799{
11800 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
11801 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
11802 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
11803}
11804
11805
11806/** Opcode 0xa9. */
11807FNIEMOP_DEF(iemOp_test_eAX_Iz)
11808{
11809 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
11810 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
11811 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
11812}
11813
11814
11815/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX */
11816#define IEM_STOS_CASE(ValBits, AddrBits) \
11817 IEM_MC_BEGIN(0, 2); \
11818 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
11819 IEM_MC_LOCAL(RTGCPTR, uAddr); \
11820 IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
11821 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
11822 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
11823 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
11824 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11825 } IEM_MC_ELSE() { \
11826 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11827 } IEM_MC_ENDIF(); \
11828 IEM_MC_ADVANCE_RIP(); \
11829 IEM_MC_END(); \
11830
11831/** Opcode 0xaa. */
11832FNIEMOP_DEF(iemOp_stosb_Yb_AL)
11833{
11834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11835
11836 /*
11837 * Use the C implementation if a repeat prefix is encountered.
11838 */
11839 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11840 {
11841 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
11842 switch (pVCpu->iem.s.enmEffAddrMode)
11843 {
11844 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
11845 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
11846 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
11847 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11848 }
11849 }
11850 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
11851
11852 /*
11853 * Sharing case implementation with stos[wdq] below.
11854 */
11855 switch (pVCpu->iem.s.enmEffAddrMode)
11856 {
11857 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
11858 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
11859 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
11860 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11861 }
11862 return VINF_SUCCESS;
11863}
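
/* A minimal sketch of the stos store above; the destination is always
 * ES:[rDI] and segment prefixes do not apply (lods below, by contrast,
 * honours the effective segment for its rSI source). Hypothetical
 * flat-memory model, not part of IEM. */
#if 0 /* illustrative only */
static void stosbStepModel(uint8_t *pbEsBase, uint64_t *puDi, uint8_t bAl, bool fDf)
{
    pbEsBase[*puDi] = bAl;              /* ES:[rDI] = AL */
    *puDi += fDf ? (uint64_t)-1 : 1;    /* EFLAGS.DF selects the direction */
}
#endif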
11864
11865
11866/** Opcode 0xab. */
11867FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
11868{
11869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11870
11871 /*
11872 * Use the C implementation if a repeat prefix is encountered.
11873 */
11874 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11875 {
11876 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
11877 switch (pVCpu->iem.s.enmEffOpSize)
11878 {
11879 case IEMMODE_16BIT:
11880 switch (pVCpu->iem.s.enmEffAddrMode)
11881 {
11882 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
11883 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
11884 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
11885 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11886 }
11887 break;
11888 case IEMMODE_32BIT:
11889 switch (pVCpu->iem.s.enmEffAddrMode)
11890 {
11891 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
11892 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
11893 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
11894 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11895 }
11896 case IEMMODE_64BIT:
11897 switch (pVCpu->iem.s.enmEffAddrMode)
11898 {
11899 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
11900 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
11901 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
11902 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11903 }
11904 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11905 }
11906 }
11907 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
11908
11909 /*
11910 * Annoying double switch here.
11911 * Using ugly macro for implementing the cases, sharing it with stosb.
11912 */
11913 switch (pVCpu->iem.s.enmEffOpSize)
11914 {
11915 case IEMMODE_16BIT:
11916 switch (pVCpu->iem.s.enmEffAddrMode)
11917 {
11918 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
11919 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
11920 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
11921 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11922 }
11923 break;
11924
11925 case IEMMODE_32BIT:
11926 switch (pVCpu->iem.s.enmEffAddrMode)
11927 {
11928 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
11929 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
11930 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
11931 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11932 }
11933 break;
11934
11935 case IEMMODE_64BIT:
11936 switch (pVCpu->iem.s.enmEffAddrMode)
11937 {
11938 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11939 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
11940 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
11941 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11942 }
11943 break;
11944 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11945 }
11946 return VINF_SUCCESS;
11947}
11948
11949#undef IEM_STOS_CASE
11950
11951/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv */
11952#define IEM_LODS_CASE(ValBits, AddrBits) \
11953 IEM_MC_BEGIN(0, 2); \
11954 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
11955 IEM_MC_LOCAL(RTGCPTR, uAddr); \
11956 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
11957 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
11958 IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
11959 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
11960 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11961 } IEM_MC_ELSE() { \
11962 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11963 } IEM_MC_ENDIF(); \
11964 IEM_MC_ADVANCE_RIP(); \
11965 IEM_MC_END();
11966
11967/** Opcode 0xac. */
11968FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
11969{
11970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11971
11972 /*
11973 * Use the C implementation if a repeat prefix is encountered.
11974 */
11975 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11976 {
11977 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
11978 switch (pVCpu->iem.s.enmEffAddrMode)
11979 {
11980 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
11981 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
11982 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
11983 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11984 }
11985 }
11986 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
11987
11988 /*
11989 * Sharing case implementation with lods[wdq] below.
11990 */
11991 switch (pVCpu->iem.s.enmEffAddrMode)
11992 {
11993 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
11994 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
11995 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
11996 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11997 }
11998 return VINF_SUCCESS;
11999}
12000
12001
12002/** Opcode 0xad. */
12003FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
12004{
12005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12006
12007 /*
12008 * Use the C implementation if a repeat prefix is encountered.
12009 */
12010 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12011 {
12012 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
12013 switch (pVCpu->iem.s.enmEffOpSize)
12014 {
12015 case IEMMODE_16BIT:
12016 switch (pVCpu->iem.s.enmEffAddrMode)
12017 {
12018 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
12019 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
12020 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
12021 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12022 }
12023 break;
12024 case IEMMODE_32BIT:
12025 switch (pVCpu->iem.s.enmEffAddrMode)
12026 {
12027 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
12028 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
12029 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
12030 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12031 }
12032 case IEMMODE_64BIT:
12033 switch (pVCpu->iem.s.enmEffAddrMode)
12034 {
12035 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
12036 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
12037 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
12038 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12039 }
12040 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12041 }
12042 }
12043 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
12044
12045 /*
12046 * Annoying double switch here.
12047 * Using ugly macro for implementing the cases, sharing it with lodsb.
12048 */
12049 switch (pVCpu->iem.s.enmEffOpSize)
12050 {
12051 case IEMMODE_16BIT:
12052 switch (pVCpu->iem.s.enmEffAddrMode)
12053 {
12054 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
12055 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
12056 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
12057 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12058 }
12059 break;
12060
12061 case IEMMODE_32BIT:
12062 switch (pVCpu->iem.s.enmEffAddrMode)
12063 {
12064 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
12065 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
12066 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
12067 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12068 }
12069 break;
12070
12071 case IEMMODE_64BIT:
12072 switch (pVCpu->iem.s.enmEffAddrMode)
12073 {
12074 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12075 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
12076 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
12077 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12078 }
12079 break;
12080 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12081 }
12082 return VINF_SUCCESS;
12083}
12084
12085#undef IEM_LODS_CASE
12086
12087/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv */
12088#define IEM_SCAS_CASE(ValBits, AddrBits) \
12089 IEM_MC_BEGIN(3, 2); \
12090 IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
12091 IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
12092 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
12093 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12094 \
12095 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12096 IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
12097 IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
12098 IEM_MC_REF_EFLAGS(pEFlags); \
12099 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
12100 \
12101 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12102 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12103 } IEM_MC_ELSE() { \
12104 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12105 } IEM_MC_ENDIF(); \
12106 IEM_MC_ADVANCE_RIP(); \
12107 IEM_MC_END();
12108
12109/** Opcode 0xae. */
12110FNIEMOP_DEF(iemOp_scasb_AL_Xb)
12111{
12112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12113
12114 /*
12115 * Use the C implementation if a repeat prefix is encountered.
12116 */
12117 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12118 {
12119 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
12120 switch (pVCpu->iem.s.enmEffAddrMode)
12121 {
12122 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
12123 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
12124 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
12125 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12126 }
12127 }
12128 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12129 {
12130 IEMOP_MNEMONIC(repne_scasb_AL_Xb, "repne scasb AL,Xb");
12131 switch (pVCpu->iem.s.enmEffAddrMode)
12132 {
12133 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
12134 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
12135 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
12136 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12137 }
12138 }
12139 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
12140
12141 /*
12142 * Sharing case implementation with scas[wdq] below.
12143 */
12144 switch (pVCpu->iem.s.enmEffAddrMode)
12145 {
12146 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
12147 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
12148 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
12149 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12150 }
12151 return VINF_SUCCESS;
12152}
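
/* A minimal sketch of the scas comparison above: AL is compared against
 * ES:[rDI] and only rDI steps; with repe/repne the iteration also stops
 * once ZF disagrees with the prefix. Hypothetical model, not part of
 * IEM. */
#if 0 /* illustrative only */
static bool scasbMatchModel(uint8_t bAl, const uint8_t *pbEsBase, uint64_t uDi)
{
    /* repne scasb stops when this turns true (ZF=1, element found). */
    return bAl == pbEsBase[uDi];
}
#endif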
12153
12154
12155/** Opcode 0xaf. */
12156FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
12157{
12158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12159
12160 /*
12161 * Use the C implementation if a repeat prefix is encountered.
12162 */
12163 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12164 {
12165 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
12166 switch (pVCpu->iem.s.enmEffOpSize)
12167 {
12168 case IEMMODE_16BIT:
12169 switch (pVCpu->iem.s.enmEffAddrMode)
12170 {
12171 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
12172 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
12173 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
12174 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12175 }
12176 break;
12177 case IEMMODE_32BIT:
12178 switch (pVCpu->iem.s.enmEffAddrMode)
12179 {
12180 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
12181 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
12182 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
12183 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12184 }
12185 case IEMMODE_64BIT:
12186 switch (pVCpu->iem.s.enmEffAddrMode)
12187 {
12188 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* cannot be encoded: 16-bit addressing is not available in 64-bit mode, only 32-bit and 64-bit are. */
12189 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
12190 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
12191 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12192 }
12193 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12194 }
12195 }
12196 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12197 {
12198 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
12199 switch (pVCpu->iem.s.enmEffOpSize)
12200 {
12201 case IEMMODE_16BIT:
12202 switch (pVCpu->iem.s.enmEffAddrMode)
12203 {
12204 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
12205 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
12206 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
12207 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12208 }
12209 break;
12210 case IEMMODE_32BIT:
12211 switch (pVCpu->iem.s.enmEffAddrMode)
12212 {
12213 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
12214 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
12215 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
12216 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12217 }
12218 case IEMMODE_64BIT:
12219 switch (pVCpu->iem.s.enmEffAddrMode)
12220 {
12221 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5); /* cannot be encoded */
12222 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
12223 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
12224 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12225 }
12226 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12227 }
12228 }
12229 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
12230
12231 /*
12232 * Annoying double switch here.
12233 * An ugly macro is used to implement the cases, shared with scasb.
12234 */
12235 switch (pVCpu->iem.s.enmEffOpSize)
12236 {
12237 case IEMMODE_16BIT:
12238 switch (pVCpu->iem.s.enmEffAddrMode)
12239 {
12240 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
12241 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
12242 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
12243 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12244 }
12245 break;
12246
12247 case IEMMODE_32BIT:
12248 switch (pVCpu->iem.s.enmEffAddrMode)
12249 {
12250 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
12251 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
12252 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
12253 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12254 }
12255 break;
12256
12257 case IEMMODE_64BIT:
12258 switch (pVCpu->iem.s.enmEffAddrMode)
12259 {
12260 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12261 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
12262 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
12263 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12264 }
12265 break;
12266 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12267 }
12268 return VINF_SUCCESS;
12269}
12270
12271#undef IEM_SCAS_CASE
12272
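/* The 0xb0..0xb7 opcodes encode the destination register in their low three
   bits; the callers below OR in uRexB so that REX.B extends the range to
   R8B..R15B. */
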
12273/**
12274 * Common 'mov r8, imm8' helper.
12275 */
12276FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
12277{
12278 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12280
12281 IEM_MC_BEGIN(0, 1);
12282 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
12283 IEM_MC_STORE_GREG_U8(iReg, u8Value);
12284 IEM_MC_ADVANCE_RIP();
12285 IEM_MC_END();
12286
12287 return VINF_SUCCESS;
12288}
12289
12290
12291/** Opcode 0xb0. */
12292FNIEMOP_DEF(iemOp_mov_AL_Ib)
12293{
12294 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
12295 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12296}
12297
12298
12299/** Opcode 0xb1. */
12300FNIEMOP_DEF(iemOp_CL_Ib)
12301{
12302 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
12303 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12304}
12305
12306
12307/** Opcode 0xb2. */
12308FNIEMOP_DEF(iemOp_DL_Ib)
12309{
12310 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
12311 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12312}
12313
12314
12315/** Opcode 0xb3. */
12316FNIEMOP_DEF(iemOp_BL_Ib)
12317{
12318 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
12319 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12320}
12321
12322
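/* Without a REX prefix, 8-bit register encodings 4..7 select the high byte
   registers AH/CH/DH/BH - hence X86_GREG_xSP..X86_GREG_xDI in the four
   functions below - while any REX prefix makes the same encodings select
   SPL/BPL/SIL/DIL.  The 8-bit GREG accessors presumably resolve this from
   the decoder's REX state. */
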
12323/** Opcode 0xb4. */
12324FNIEMOP_DEF(iemOp_mov_AH_Ib)
12325{
12326 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
12327 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12328}
12329
12330
12331/** Opcode 0xb5. */
12332FNIEMOP_DEF(iemOp_CH_Ib)
12333{
12334 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
12335 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12336}
12337
12338
12339/** Opcode 0xb6. */
12340FNIEMOP_DEF(iemOp_DH_Ib)
12341{
12342 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
12343 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12344}
12345
12346
12347/** Opcode 0xb7. */
12348FNIEMOP_DEF(iemOp_BH_Ib)
12349{
12350 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
12351 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12352}
12353
12354
12355/**
12356 * Common 'mov regX,immX' helper.
12357 */
12358FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
12359{
12360 switch (pVCpu->iem.s.enmEffOpSize)
12361 {
12362 case IEMMODE_16BIT:
12363 {
12364 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12366
12367 IEM_MC_BEGIN(0, 1);
12368 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
12369 IEM_MC_STORE_GREG_U16(iReg, u16Value);
12370 IEM_MC_ADVANCE_RIP();
12371 IEM_MC_END();
12372 break;
12373 }
12374
12375 case IEMMODE_32BIT:
12376 {
12377 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12378 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12379
12380 IEM_MC_BEGIN(0, 1);
12381 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
12382 IEM_MC_STORE_GREG_U32(iReg, u32Value);
12383 IEM_MC_ADVANCE_RIP();
12384 IEM_MC_END();
12385 break;
12386 }
12387 case IEMMODE_64BIT:
12388 {
12389 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
12390 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12391
12392 IEM_MC_BEGIN(0, 1);
12393 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
12394 IEM_MC_STORE_GREG_U64(iReg, u64Value);
12395 IEM_MC_ADVANCE_RIP();
12396 IEM_MC_END();
12397 break;
12398 }
12399 }
12400
12401 return VINF_SUCCESS;
12402}
12403
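/* Note that with REX.W the 0xb8..0xbf forms take a full 64-bit immediate -
   the only case of an 8-byte immediate in the instruction set - which is why
   the 64-bit path above fetches with IEM_OPCODE_GET_NEXT_U64 rather than
   sign-extending a 32-bit value. */
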
12404
12405/** Opcode 0xb8. */
12406FNIEMOP_DEF(iemOp_eAX_Iv)
12407{
12408 IEMOP_MNEMONIC(mov_rAX_Iv, "mov rAX,Iv");
12409 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12410}
12411
12412
12413/** Opcode 0xb9. */
12414FNIEMOP_DEF(iemOp_eCX_Iv)
12415{
12416 IEMOP_MNEMONIC(mov_rCX_Iv, "mov rCX,Iv");
12417 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12418}
12419
12420
12421/** Opcode 0xba. */
12422FNIEMOP_DEF(iemOp_eDX_Iv)
12423{
12424 IEMOP_MNEMONIC(mov_rDX_Iv, "mov rDX,Iv");
12425 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12426}
12427
12428
12429/** Opcode 0xbb. */
12430FNIEMOP_DEF(iemOp_eBX_Iv)
12431{
12432 IEMOP_MNEMONIC(mov_rBX_Iv, "mov rBX,Iv");
12433 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12434}
12435
12436
12437/** Opcode 0xbc. */
12438FNIEMOP_DEF(iemOp_eSP_Iv)
12439{
12440 IEMOP_MNEMONIC(mov_rSP_Iv, "mov rSP,Iv");
12441 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12442}
12443
12444
12445/** Opcode 0xbd. */
12446FNIEMOP_DEF(iemOp_eBP_Iv)
12447{
12448 IEMOP_MNEMONIC(mov_rBP_Iv, "mov rBP,Iv");
12449 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12450}
12451
12452
12453/** Opcode 0xbe. */
12454FNIEMOP_DEF(iemOp_eSI_Iv)
12455{
12456 IEMOP_MNEMONIC(mov_rSI_Iv, "mov rSI,Iv");
12457 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12458}
12459
12460
12461/** Opcode 0xbf. */
12462FNIEMOP_DEF(iemOp_eDI_Iv)
12463{
12464 IEMOP_MNEMONIC(mov_rDI_Iv, "mov rDI,Iv");
12465 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12466}
12467
12468
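/* In the memory forms of group 2 below, the shift count immediate comes after
   the ModR/M bytes, so IEM_MC_CALC_RM_EFF_ADDR is told (third argument) how
   many immediate bytes are still outstanding; that is needed to get
   RIP-relative addressing right in 64-bit mode, where the displacement is
   relative to the end of the whole instruction. */
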
12469/** Opcode 0xc0. */
12470FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
12471{
12472 IEMOP_HLP_MIN_186();
12473 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12474 PCIEMOPSHIFTSIZES pImpl;
12475 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
12476 {
12477 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
12478 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
12479 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
12480 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
12481 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
12482 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
12483 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
12484 case 6: return IEMOP_RAISE_INVALID_OPCODE();
12485 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
12486 }
12487 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
12488
12489 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12490 {
12491 /* register */
12492 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12494 IEM_MC_BEGIN(3, 0);
12495 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
12496 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
12497 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12498 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
12499 IEM_MC_REF_EFLAGS(pEFlags);
12500 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
12501 IEM_MC_ADVANCE_RIP();
12502 IEM_MC_END();
12503 }
12504 else
12505 {
12506 /* memory */
12507 IEM_MC_BEGIN(3, 2);
12508 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
12509 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12510 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12511 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12512
12513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12514 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12515 IEM_MC_ASSIGN(cShiftArg, cShift);
12516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12517 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12518 IEM_MC_FETCH_EFLAGS(EFlags);
12519 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
12520
12521 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
12522 IEM_MC_COMMIT_EFLAGS(EFlags);
12523 IEM_MC_ADVANCE_RIP();
12524 IEM_MC_END();
12525 }
12526 return VINF_SUCCESS;
12527}
12528
12529
12530/** Opcode 0xc1. */
12531FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
12532{
12533 IEMOP_HLP_MIN_186();
12534 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12535 PCIEMOPSHIFTSIZES pImpl;
12536 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
12537 {
12538 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
12539 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
12540 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
12541 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
12542 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
12543 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
12544 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
12545 case 6: return IEMOP_RAISE_INVALID_OPCODE();
12546 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
12547 }
12548 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
12549
12550 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12551 {
12552 /* register */
12553 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12555 switch (pVCpu->iem.s.enmEffOpSize)
12556 {
12557 case IEMMODE_16BIT:
12558 IEM_MC_BEGIN(3, 0);
12559 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12560 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
12561 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12562 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
12563 IEM_MC_REF_EFLAGS(pEFlags);
12564 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
12565 IEM_MC_ADVANCE_RIP();
12566 IEM_MC_END();
12567 return VINF_SUCCESS;
12568
12569 case IEMMODE_32BIT:
12570 IEM_MC_BEGIN(3, 0);
12571 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12572 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
12573 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12574 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
12575 IEM_MC_REF_EFLAGS(pEFlags);
12576 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
12577 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12578 IEM_MC_ADVANCE_RIP();
12579 IEM_MC_END();
12580 return VINF_SUCCESS;
12581
12582 case IEMMODE_64BIT:
12583 IEM_MC_BEGIN(3, 0);
12584 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12585 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
12586 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12587 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
12588 IEM_MC_REF_EFLAGS(pEFlags);
12589 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
12590 IEM_MC_ADVANCE_RIP();
12591 IEM_MC_END();
12592 return VINF_SUCCESS;
12593
12594 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12595 }
12596 }
12597 else
12598 {
12599 /* memory */
12600 switch (pVCpu->iem.s.enmEffOpSize)
12601 {
12602 case IEMMODE_16BIT:
12603 IEM_MC_BEGIN(3, 2);
12604 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12605 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12606 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12607 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12608
12609 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12610 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12611 IEM_MC_ASSIGN(cShiftArg, cShift);
12612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12613 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12614 IEM_MC_FETCH_EFLAGS(EFlags);
12615 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
12616
12617 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
12618 IEM_MC_COMMIT_EFLAGS(EFlags);
12619 IEM_MC_ADVANCE_RIP();
12620 IEM_MC_END();
12621 return VINF_SUCCESS;
12622
12623 case IEMMODE_32BIT:
12624 IEM_MC_BEGIN(3, 2);
12625 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12626 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12627 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12628 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12629
12630 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12631 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12632 IEM_MC_ASSIGN(cShiftArg, cShift);
12633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12634 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12635 IEM_MC_FETCH_EFLAGS(EFlags);
12636 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
12637
12638 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
12639 IEM_MC_COMMIT_EFLAGS(EFlags);
12640 IEM_MC_ADVANCE_RIP();
12641 IEM_MC_END();
12642 return VINF_SUCCESS;
12643
12644 case IEMMODE_64BIT:
12645 IEM_MC_BEGIN(3, 2);
12646 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12647 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12648 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12649 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12650
12651 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12652 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12653 IEM_MC_ASSIGN(cShiftArg, cShift);
12654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12655 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12656 IEM_MC_FETCH_EFLAGS(EFlags);
12657 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
12658
12659 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
12660 IEM_MC_COMMIT_EFLAGS(EFlags);
12661 IEM_MC_ADVANCE_RIP();
12662 IEM_MC_END();
12663 return VINF_SUCCESS;
12664
12665 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12666 }
12667 }
12668}
12669
12670
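/* The near returns below use IEMOP_HLP_DEFAULT_64BIT_OP_SIZE: in 64-bit mode
   near branches and returns default to a 64-bit operand size and a 32-bit
   operand size cannot be encoded, leaving only the 66h-prefixed 16-bit form
   as an alternative. */
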
12671/** Opcode 0xc2. */
12672FNIEMOP_DEF(iemOp_retn_Iw)
12673{
12674 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
12675 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12677 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12678 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
12679}
12680
12681
12682/** Opcode 0xc3. */
12683FNIEMOP_DEF(iemOp_retn)
12684{
12685 IEMOP_MNEMONIC(retn, "retn");
12686 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12688 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
12689}
12690
12691
12692/** Opcode 0xc4. */
12693FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
12694{
12695 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12696 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
12697 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12698 {
12699 IEMOP_MNEMONIC(vex3_prefix, "3-byte-vex");
12700 /* The LES instruction is invalid in 64-bit mode. In legacy and
12701 compatibility mode it is invalid with MOD=3.
12702 The use as the 3-byte VEX prefix (0xc4 is the 3-byte form, 0xc5 the
12703 2-byte one) is made possible by assigning the inverted REX.R and REX.X
12704 to the top two MOD bits: outside 64-bit mode a VEX-encoded instruction
12705 always shows MOD=3, so the two encodings cannot collide. */
12706 /** @todo VEX: Just use new tables for it. */
12707 return IEMOP_RAISE_INVALID_OPCODE();
12708 }
12709 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
12710 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
12711}
12712
12713
12714/** Opcode 0xc5. */
12715FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
12716{
12717 /* The LDS instruction is invalid in 64-bit mode. In legacy and
12718 compatibility mode it is invalid with MOD=3.
12719 The use as the 2-byte VEX prefix is made possible by assigning the
12720 inverted REX.R to the top MOD bit and the top bit of the inverted vvvv
12721 register specifier to the bottom MOD bit. VEX is not available in real or v86 mode. */
12722 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12723 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
12724 {
12725 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
12726 {
12727 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
12728 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
12729 }
12730 IEMOP_HLP_NO_REAL_OR_V86_MODE();
12731 }
12732
12733 IEMOP_MNEMONIC(vex2_prefix, "2-byte-vex");
12734 /** @todo Test when exactly the VEX conformance checks kick in during
12735 * instruction decoding and fetching (using \#PF). */
12736 uint8_t bVex; IEM_OPCODE_GET_NEXT_U8(&bVex); /* The 2-byte form has a single payload byte (~R.vvvv.L.pp). */
12737 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
12739#if 0 /* will make sense of this next week... */
12740 if ( !(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
12741 &&
12742 )
12743 {
12744
12745 }
12746#endif
12747
12748 /** @todo VEX: Just use new tables for it. */
12749 return IEMOP_RAISE_INVALID_OPCODE();
12750}
12751
12752
12753/** Opcode 0xc6. */
12754FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
12755{
12756 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12757 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
12758 return IEMOP_RAISE_INVALID_OPCODE();
12759 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
12760
12761 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12762 {
12763 /* register access */
12764 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12766 IEM_MC_BEGIN(0, 0);
12767 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
12768 IEM_MC_ADVANCE_RIP();
12769 IEM_MC_END();
12770 }
12771 else
12772 {
12773 /* memory access. */
12774 IEM_MC_BEGIN(0, 1);
12775 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12776 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12777 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12779 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
12780 IEM_MC_ADVANCE_RIP();
12781 IEM_MC_END();
12782 }
12783 return VINF_SUCCESS;
12784}
12785
12786
12787/** Opcode 0xc7. */
12788FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
12789{
12790 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12791 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
12792 return IEMOP_RAISE_INVALID_OPCODE();
12793 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
12794
12795 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12796 {
12797 /* register access */
12798 switch (pVCpu->iem.s.enmEffOpSize)
12799 {
12800 case IEMMODE_16BIT:
12801 IEM_MC_BEGIN(0, 0);
12802 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12804 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
12805 IEM_MC_ADVANCE_RIP();
12806 IEM_MC_END();
12807 return VINF_SUCCESS;
12808
12809 case IEMMODE_32BIT:
12810 IEM_MC_BEGIN(0, 0);
12811 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12813 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
12814 IEM_MC_ADVANCE_RIP();
12815 IEM_MC_END();
12816 return VINF_SUCCESS;
12817
12818 case IEMMODE_64BIT:
12819 IEM_MC_BEGIN(0, 0);
12820 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
12821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12822 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
12823 IEM_MC_ADVANCE_RIP();
12824 IEM_MC_END();
12825 return VINF_SUCCESS;
12826
12827 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12828 }
12829 }
12830 else
12831 {
12832 /* memory access. */
12833 switch (pVCpu->iem.s.enmEffOpSize)
12834 {
12835 case IEMMODE_16BIT:
12836 IEM_MC_BEGIN(0, 1);
12837 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12838 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
12839 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12841 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
12842 IEM_MC_ADVANCE_RIP();
12843 IEM_MC_END();
12844 return VINF_SUCCESS;
12845
12846 case IEMMODE_32BIT:
12847 IEM_MC_BEGIN(0, 1);
12848 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12849 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
12850 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12852 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
12853 IEM_MC_ADVANCE_RIP();
12854 IEM_MC_END();
12855 return VINF_SUCCESS;
12856
12857 case IEMMODE_64BIT:
12858 IEM_MC_BEGIN(0, 1);
12859 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12860 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
12861 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
12862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12863 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
12864 IEM_MC_ADVANCE_RIP();
12865 IEM_MC_END();
12866 return VINF_SUCCESS;
12867
12868 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12869 }
12870 }
12871}
12872
12873
12874
12875
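/* ENTER takes a 16-bit frame size and an 8-bit nesting level; the
   architecture only uses the nesting level modulo 32, a detail presumably
   applied in iemCImpl_enter along with the actual stack frame work. */
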
12876/** Opcode 0xc8. */
12877FNIEMOP_DEF(iemOp_enter_Iw_Ib)
12878{
12879 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
12880 IEMOP_HLP_MIN_186();
12881 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12882 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
12883 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
12884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12885 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
12886}
12887
12888
12889/** Opcode 0xc9. */
12890FNIEMOP_DEF(iemOp_leave)
12891{
12892 IEMOP_MNEMONIC(leave, "leave");
12893 IEMOP_HLP_MIN_186();
12894 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12896 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
12897}
12898
12899
12900/** Opcode 0xca. */
12901FNIEMOP_DEF(iemOp_retf_Iw)
12902{
12903 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
12904 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12906 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12907 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
12908}
12909
12910
12911/** Opcode 0xcb. */
12912FNIEMOP_DEF(iemOp_retf)
12913{
12914 IEMOP_MNEMONIC(retf, "retf");
12915 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12916 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12917 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
12918}
12919
12920
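/* The next two opcodes both raise a software interrupt, but they must not be
   folded together: the single byte int3 (0xcc) is exempt from the IOPL check
   that 'int 3' encoded as 0xcd 0x03 is subject to in V8086 mode, which is
   what the fIsBpInstr argument lets iemCImpl_int distinguish. */
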
12921/** Opcode 0xcc. */
12922FNIEMOP_DEF(iemOp_int_3)
12923{
12924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12925 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
12926}
12927
12928
12929/** Opcode 0xcd. */
12930FNIEMOP_DEF(iemOp_int_Ib)
12931{
12932 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
12933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12934 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
12935}
12936
12937
12938/** Opcode 0xce. */
12939FNIEMOP_DEF(iemOp_into)
12940{
12941 IEMOP_MNEMONIC(into, "into");
12942 IEMOP_HLP_NO_64BIT();
12943
12944 IEM_MC_BEGIN(2, 0);
12945 IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
12946 IEM_MC_ARG_CONST(bool, fIsBpInstr, /*=*/ false, 1);
12947 IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
12948 IEM_MC_END();
12949 return VINF_SUCCESS;
12950}
12951
12952
12953/** Opcode 0xcf. */
12954FNIEMOP_DEF(iemOp_iret)
12955{
12956 IEMOP_MNEMONIC(iret, "iret");
12957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12958 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
12959}
12960
12961
12962/** Opcode 0xd0. */
12963FNIEMOP_DEF(iemOp_Grp2_Eb_1)
12964{
12965 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12966 PCIEMOPSHIFTSIZES pImpl;
12967 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
12968 {
12969 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
12970 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
12971 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
12972 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
12973 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
12974 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
12975 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
12976 case 6: return IEMOP_RAISE_INVALID_OPCODE();
12977 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
12978 }
12979 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
12980
12981 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12982 {
12983 /* register */
12984 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12985 IEM_MC_BEGIN(3, 0);
12986 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
12987 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
12988 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12989 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
12990 IEM_MC_REF_EFLAGS(pEFlags);
12991 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
12992 IEM_MC_ADVANCE_RIP();
12993 IEM_MC_END();
12994 }
12995 else
12996 {
12997 /* memory */
12998 IEM_MC_BEGIN(3, 2);
12999 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13000 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
13001 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13002 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13003
13004 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13006 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13007 IEM_MC_FETCH_EFLAGS(EFlags);
13008 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13009
13010 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
13011 IEM_MC_COMMIT_EFLAGS(EFlags);
13012 IEM_MC_ADVANCE_RIP();
13013 IEM_MC_END();
13014 }
13015 return VINF_SUCCESS;
13016}
13017
13018
13019
13020/** Opcode 0xd1. */
13021FNIEMOP_DEF(iemOp_Grp2_Ev_1)
13022{
13023 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13024 PCIEMOPSHIFTSIZES pImpl;
13025 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13026 {
13027 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
13028 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
13029 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
13030 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
13031 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
13032 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
13033 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
13034 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13035 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
13036 }
13037 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13038
13039 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13040 {
13041 /* register */
13042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13043 switch (pVCpu->iem.s.enmEffOpSize)
13044 {
13045 case IEMMODE_16BIT:
13046 IEM_MC_BEGIN(3, 0);
13047 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13048 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13049 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13050 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13051 IEM_MC_REF_EFLAGS(pEFlags);
13052 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13053 IEM_MC_ADVANCE_RIP();
13054 IEM_MC_END();
13055 return VINF_SUCCESS;
13056
13057 case IEMMODE_32BIT:
13058 IEM_MC_BEGIN(3, 0);
13059 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13060 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13061 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13062 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13063 IEM_MC_REF_EFLAGS(pEFlags);
13064 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13065 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
13066 IEM_MC_ADVANCE_RIP();
13067 IEM_MC_END();
13068 return VINF_SUCCESS;
13069
13070 case IEMMODE_64BIT:
13071 IEM_MC_BEGIN(3, 0);
13072 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13073 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13074 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13075 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13076 IEM_MC_REF_EFLAGS(pEFlags);
13077 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13078 IEM_MC_ADVANCE_RIP();
13079 IEM_MC_END();
13080 return VINF_SUCCESS;
13081
13082 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13083 }
13084 }
13085 else
13086 {
13087 /* memory */
13088 switch (pVCpu->iem.s.enmEffOpSize)
13089 {
13090 case IEMMODE_16BIT:
13091 IEM_MC_BEGIN(3, 2);
13092 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13093 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13094 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13095 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13096
13097 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13099 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13100 IEM_MC_FETCH_EFLAGS(EFlags);
13101 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13102
13103 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
13104 IEM_MC_COMMIT_EFLAGS(EFlags);
13105 IEM_MC_ADVANCE_RIP();
13106 IEM_MC_END();
13107 return VINF_SUCCESS;
13108
13109 case IEMMODE_32BIT:
13110 IEM_MC_BEGIN(3, 2);
13111 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13112 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13113 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13114 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13115
13116 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13118 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13119 IEM_MC_FETCH_EFLAGS(EFlags);
13120 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13121
13122 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
13123 IEM_MC_COMMIT_EFLAGS(EFlags);
13124 IEM_MC_ADVANCE_RIP();
13125 IEM_MC_END();
13126 return VINF_SUCCESS;
13127
13128 case IEMMODE_64BIT:
13129 IEM_MC_BEGIN(3, 2);
13130 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13131 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13132 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13133 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13134
13135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13137 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13138 IEM_MC_FETCH_EFLAGS(EFlags);
13139 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13140
13141 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
13142 IEM_MC_COMMIT_EFLAGS(EFlags);
13143 IEM_MC_ADVANCE_RIP();
13144 IEM_MC_END();
13145 return VINF_SUCCESS;
13146
13147 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13148 }
13149 }
13150}
13151
13152
13153/** Opcode 0xd2. */
13154FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
13155{
13156 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13157 PCIEMOPSHIFTSIZES pImpl;
13158 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13159 {
13160 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
13161 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
13162 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
13163 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
13164 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
13165 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
13166 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
13167 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13168 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
13169 }
13170 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13171
13172 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13173 {
13174 /* register */
13175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13176 IEM_MC_BEGIN(3, 0);
13177 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13178 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13179 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13180 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13181 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13182 IEM_MC_REF_EFLAGS(pEFlags);
13183 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13184 IEM_MC_ADVANCE_RIP();
13185 IEM_MC_END();
13186 }
13187 else
13188 {
13189 /* memory */
13190 IEM_MC_BEGIN(3, 2);
13191 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13192 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13193 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13194 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13195
13196 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13198 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13199 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13200 IEM_MC_FETCH_EFLAGS(EFlags);
13201 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13202
13203 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
13204 IEM_MC_COMMIT_EFLAGS(EFlags);
13205 IEM_MC_ADVANCE_RIP();
13206 IEM_MC_END();
13207 }
13208 return VINF_SUCCESS;
13209}
13210
13211
13212/** Opcode 0xd3. */
13213FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
13214{
13215 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13216 PCIEMOPSHIFTSIZES pImpl;
13217 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13218 {
13219 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
13220 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
13221 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
13222 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
13223 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
13224 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
13225 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
13226 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13227 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
13228 }
13229 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13230
13231 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13232 {
13233 /* register */
13234 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13235 switch (pVCpu->iem.s.enmEffOpSize)
13236 {
13237 case IEMMODE_16BIT:
13238 IEM_MC_BEGIN(3, 0);
13239 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13240 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13241 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13242 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13243 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13244 IEM_MC_REF_EFLAGS(pEFlags);
13245 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13246 IEM_MC_ADVANCE_RIP();
13247 IEM_MC_END();
13248 return VINF_SUCCESS;
13249
13250 case IEMMODE_32BIT:
13251 IEM_MC_BEGIN(3, 0);
13252 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13253 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13254 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13255 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13256 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13257 IEM_MC_REF_EFLAGS(pEFlags);
13258 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13259 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
13260 IEM_MC_ADVANCE_RIP();
13261 IEM_MC_END();
13262 return VINF_SUCCESS;
13263
13264 case IEMMODE_64BIT:
13265 IEM_MC_BEGIN(3, 0);
13266 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13267 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13268 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13269 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13270 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13271 IEM_MC_REF_EFLAGS(pEFlags);
13272 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13273 IEM_MC_ADVANCE_RIP();
13274 IEM_MC_END();
13275 return VINF_SUCCESS;
13276
13277 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13278 }
13279 }
13280 else
13281 {
13282 /* memory */
13283 switch (pVCpu->iem.s.enmEffOpSize)
13284 {
13285 case IEMMODE_16BIT:
13286 IEM_MC_BEGIN(3, 2);
13287 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13288 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13289 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13290 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13291
13292 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13294 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13295 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13296 IEM_MC_FETCH_EFLAGS(EFlags);
13297 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13298
13299 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
13300 IEM_MC_COMMIT_EFLAGS(EFlags);
13301 IEM_MC_ADVANCE_RIP();
13302 IEM_MC_END();
13303 return VINF_SUCCESS;
13304
13305 case IEMMODE_32BIT:
13306 IEM_MC_BEGIN(3, 2);
13307 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13308 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13309 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13310 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13311
13312 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13314 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13315 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13316 IEM_MC_FETCH_EFLAGS(EFlags);
13317 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13318
13319 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
13320 IEM_MC_COMMIT_EFLAGS(EFlags);
13321 IEM_MC_ADVANCE_RIP();
13322 IEM_MC_END();
13323 return VINF_SUCCESS;
13324
13325 case IEMMODE_64BIT:
13326 IEM_MC_BEGIN(3, 2);
13327 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13328 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13329 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13330 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13331
13332 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13334 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13335 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13336 IEM_MC_FETCH_EFLAGS(EFlags);
13337 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13338
13339 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
13340 IEM_MC_COMMIT_EFLAGS(EFlags);
13341 IEM_MC_ADVANCE_RIP();
13342 IEM_MC_END();
13343 return VINF_SUCCESS;
13344
13345 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13346 }
13347 }
13348}
13349
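/* AAM divides AL by the immediate, leaving the quotient in AH and the
   remainder in AL; the common encoding is 0xd4 0x0a ('aam 10').  A zero
   immediate must raise #DE, hence the explicit bImm check below. */
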
13350/** Opcode 0xd4. */
13351FNIEMOP_DEF(iemOp_aam_Ib)
13352{
13353 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
13354 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
13355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13356 IEMOP_HLP_NO_64BIT();
13357 if (!bImm)
13358 return IEMOP_RAISE_DIVIDE_ERROR();
13359 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
13360}
13361
13362
13363/** Opcode 0xd5. */
13364FNIEMOP_DEF(iemOp_aad_Ib)
13365{
13366 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
13367 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
13368 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13369 IEMOP_HLP_NO_64BIT();
13370 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
13371}
13372
13373
13374/** Opcode 0xd6. */
13375FNIEMOP_DEF(iemOp_salc)
13376{
13377 IEMOP_MNEMONIC(salc, "salc");
13378 IEMOP_HLP_MIN_286(); /* (undocumented at the time; salc is a single-byte instruction with no immediate, so no operand bytes are fetched) */
13380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13381 IEMOP_HLP_NO_64BIT();
13382
13383 IEM_MC_BEGIN(0, 0);
13384 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
13385 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
13386 } IEM_MC_ELSE() {
13387 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
13388 } IEM_MC_ENDIF();
13389 IEM_MC_ADVANCE_RIP();
13390 IEM_MC_END();
13391 return VINF_SUCCESS;
13392}
13393
13394
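/* XLAT replaces AL with the byte at [xBX + unsigned AL].  The default DS
   segment honours override prefixes, which is why the fetches below go
   through iEffSeg rather than a hardcoded segment. */
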
13395/** Opcode 0xd7. */
13396FNIEMOP_DEF(iemOp_xlat)
13397{
13398 IEMOP_MNEMONIC(xlat, "xlat");
13399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13400 switch (pVCpu->iem.s.enmEffAddrMode)
13401 {
13402 case IEMMODE_16BIT:
13403 IEM_MC_BEGIN(2, 0);
13404 IEM_MC_LOCAL(uint8_t, u8Tmp);
13405 IEM_MC_LOCAL(uint16_t, u16Addr);
13406 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
13407 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
13408 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
13409 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
13410 IEM_MC_ADVANCE_RIP();
13411 IEM_MC_END();
13412 return VINF_SUCCESS;
13413
13414 case IEMMODE_32BIT:
13415 IEM_MC_BEGIN(2, 0);
13416 IEM_MC_LOCAL(uint8_t, u8Tmp);
13417 IEM_MC_LOCAL(uint32_t, u32Addr);
13418 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
13419 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
13420 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
13421 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
13422 IEM_MC_ADVANCE_RIP();
13423 IEM_MC_END();
13424 return VINF_SUCCESS;
13425
13426 case IEMMODE_64BIT:
13427 IEM_MC_BEGIN(2, 0);
13428 IEM_MC_LOCAL(uint8_t, u8Tmp);
13429 IEM_MC_LOCAL(uint64_t, u64Addr);
13430 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
13431 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
13432 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
13433 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
13434 IEM_MC_ADVANCE_RIP();
13435 IEM_MC_END();
13436 return VINF_SUCCESS;
13437
13438 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13439 }
13440}
13441
13442
13443/**
13444 * Common worker for FPU instructions working on ST0 and STn, and storing the
13445 * result in ST0.
13446 * @param   bRm         The ModR/M byte.
13447 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
13448 */
13449FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
13450{
13451 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13452
13453 IEM_MC_BEGIN(3, 1);
13454 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13455 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13456 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13457 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13458
13459 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13460 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13461 IEM_MC_PREPARE_FPU_USAGE();
13462 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13463 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
13464 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13465 IEM_MC_ELSE()
13466 IEM_MC_FPU_STACK_UNDERFLOW(0);
13467 IEM_MC_ENDIF();
13468 IEM_MC_ADVANCE_RIP();
13469
13470 IEM_MC_END();
13471 return VINF_SUCCESS;
13472}
13473
13474
13475/**
13476 * Common worker for FPU instructions working on ST0 and STn, and only affecting
13477 * flags.
13478 * @param   bRm         The ModR/M byte.
13479 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
13480 */
13481FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
13482{
13483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13484
13485 IEM_MC_BEGIN(3, 1);
13486 IEM_MC_LOCAL(uint16_t, u16Fsw);
13487 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13488 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13489 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13490
13491 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13492 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13493 IEM_MC_PREPARE_FPU_USAGE();
13494 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13495 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
13496 IEM_MC_UPDATE_FSW(u16Fsw);
13497 IEM_MC_ELSE()
13498 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
13499 IEM_MC_ENDIF();
13500 IEM_MC_ADVANCE_RIP();
13501
13502 IEM_MC_END();
13503 return VINF_SUCCESS;
13504}
13505
13506
13507/**
13508 * Common worker for FPU instructions working on ST0 and STn, only affecting
13509 * flags, and popping when done.
13510 * @param   bRm         The ModR/M byte.
13511 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
13512 */
13513FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
13514{
13515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13516
13517 IEM_MC_BEGIN(3, 1);
13518 IEM_MC_LOCAL(uint16_t, u16Fsw);
13519 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13520 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13521 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13522
13523 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13524 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13525 IEM_MC_PREPARE_FPU_USAGE();
13526 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13527 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
13528 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
13529 IEM_MC_ELSE()
13530 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
13531 IEM_MC_ENDIF();
13532 IEM_MC_ADVANCE_RIP();
13533
13534 IEM_MC_END();
13535 return VINF_SUCCESS;
13536}
13537
13538
13539/** Opcode 0xd8 11/0. */
13540FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
13541{
13542 IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
13543 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
13544}
13545
13546
13547/** Opcode 0xd8 11/1. */
13548FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
13549{
13550 IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
13551 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
13552}
13553
13554
13555/** Opcode 0xd8 11/2. */
13556FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
13557{
13558 IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
13559 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
13560}
13561
13562
13563/** Opcode 0xd8 11/3. */
13564FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
13565{
13566 IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
13567 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
13568}
13569
13570
13571/** Opcode 0xd8 11/4. */
13572FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
13573{
13574 IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
13575 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
13576}
13577
13578
13579/** Opcode 0xd8 11/5. */
13580FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
13581{
13582 IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
13583 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
13584}
13585
13586
13587/** Opcode 0xd8 11/6. */
13588FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
13589{
13590 IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
13591 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
13592}
13593
13594
13595/** Opcode 0xd8 11/7. */
13596FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
13597{
13598 IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
13599 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
13600}
13601
13602
13603/**
13604 * Common worker for FPU instructions working on ST0 and an m32r, and storing
13605 * the result in ST0.
13606 * @param   bRm         The ModR/M byte.
13607 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
13608 */
13609FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
13610{
13611 IEM_MC_BEGIN(3, 3);
13612 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13613 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13614 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
13615 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13616 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13617 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
13618
13619 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13621
13622 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13623 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13624 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13625
13626 IEM_MC_PREPARE_FPU_USAGE();
13627 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
13628 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
13629 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13630 IEM_MC_ELSE()
13631 IEM_MC_FPU_STACK_UNDERFLOW(0);
13632 IEM_MC_ENDIF();
13633 IEM_MC_ADVANCE_RIP();
13634
13635 IEM_MC_END();
13636 return VINF_SUCCESS;
13637}
13638
13639
13640/** Opcode 0xd8 !11/0. */
13641FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
13642{
13643 IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
13644 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
13645}
13646
13647
13648/** Opcode 0xd8 !11/1. */
13649FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
13650{
13651 IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
13652 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
13653}
13654
13655
13656/** Opcode 0xd8 !11/2. */
13657FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
13658{
13659 IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");
13660
13661 IEM_MC_BEGIN(3, 3);
13662 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13663 IEM_MC_LOCAL(uint16_t, u16Fsw);
13664 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
13665 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13666 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13667 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
13668
13669 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13670 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13671
13672 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13673 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13674 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13675
13676 IEM_MC_PREPARE_FPU_USAGE();
13677 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
13678 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
13679 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13680 IEM_MC_ELSE()
13681 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13682 IEM_MC_ENDIF();
13683 IEM_MC_ADVANCE_RIP();
13684
13685 IEM_MC_END();
13686 return VINF_SUCCESS;
13687}
13688
13689
13690/** Opcode 0xd8 !11/3. */
13691FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
13692{
13693 IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");
13694
13695 IEM_MC_BEGIN(3, 3);
13696 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13697 IEM_MC_LOCAL(uint16_t, u16Fsw);
13698 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
13699 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13700 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13701 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
13702
13703 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13705
13706 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13707 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13708 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13709
13710 IEM_MC_PREPARE_FPU_USAGE();
13711 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
13712 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
13713 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13714 IEM_MC_ELSE()
13715 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13716 IEM_MC_ENDIF();
13717 IEM_MC_ADVANCE_RIP();
13718
13719 IEM_MC_END();
13720 return VINF_SUCCESS;
13721}
13722
13723
13724/** Opcode 0xd8 !11/4. */
13725FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
13726{
13727 IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
13728 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
13729}
13730
13731
13732/** Opcode 0xd8 !11/5. */
13733FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
13734{
13735 IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
13736 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
13737}
13738
13739
13740/** Opcode 0xd8 !11/6. */
13741FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
13742{
13743 IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
13744 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
13745}
13746
13747
13748/** Opcode 0xd8 !11/7. */
13749FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
13750{
13751 IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
13752 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
13753}
13754
13755
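/* The 0xd8 escape dispatches on ModRM.reg (fadd/fmul/fcom/fcomp/fsub/fsubr/
   fdiv/fdivr), with MOD=3 selecting the st0,stN register forms and everything
   else the m32r memory forms.  The FPU opcode word recorded first below feeds
   the 11-bit FOP (low three opcode bits plus ModRM) that FSTENV/FSAVE report. */
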
13756/** Opcode 0xd8. */
13757FNIEMOP_DEF(iemOp_EscF0)
13758{
13759 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13760 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
13761
13762 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13763 {
13764 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13765 {
13766 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
13767 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
13768 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
13769 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
13770 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
13771 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
13772 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
13773 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
13774 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13775 }
13776 }
13777 else
13778 {
13779 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13780 {
13781 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
13782 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
13783 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
13784 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
13785 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
13786 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
13787 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
13788 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
13789 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13790 }
13791 }
13792}
13793
13794
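/* fld pushes onto the x87 stack: TOP is decremented, so the register about to
   become ST(0) is the current ST(7) - which is why the code below checks
   IEM_MC_IF_FPUREG_IS_EMPTY(7) and treats a non-empty ST(7) as a stack
   overflow. */
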
13795/** Opcode 0xd9 !11/0 mem32real
13796 * @sa iemOp_fld_m64r */
13797FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
13798{
13799 IEMOP_MNEMONIC(fld_m32r, "fld m32r");
13800
13801 IEM_MC_BEGIN(2, 3);
13802 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13803 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13804 IEM_MC_LOCAL(RTFLOAT32U, r32Val);
13805 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13806 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);
13807
13808 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13809 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13810
13811 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13812 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13813 IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13814
13815 IEM_MC_PREPARE_FPU_USAGE();
13816 IEM_MC_IF_FPUREG_IS_EMPTY(7)
13817 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
13818 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13819 IEM_MC_ELSE()
13820 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13821 IEM_MC_ENDIF();
13822 IEM_MC_ADVANCE_RIP();
13823
13824 IEM_MC_END();
13825 return VINF_SUCCESS;
13826}
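
/*
 * Side note, illustration only: widening a 32-bit real to the 80-bit format
 * is always exact (a 24-bit significand fits into 64 bits), so the
 * conversion above never loses precision.  Host-side sketch assuming an x86
 * long double:
 */
#if 0
# include <assert.h>

static void fldM32WideningIsExact(float r32)
{
    long double const lrd = (long double)r32;
    assert((float)lrd == r32 || r32 != r32 /* NaNs never compare equal */);
}
#endif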
13827
13828
13829/** Opcode 0xd9 !11/2 mem32real */
13830FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
13831{
13832 IEMOP_MNEMONIC(fst_m32r, "fst m32r");
13833 IEM_MC_BEGIN(3, 2);
13834 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13835 IEM_MC_LOCAL(uint16_t, u16Fsw);
13836 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13837 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
13838 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
13839
13840 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13841 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13842 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13843 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13844
13845 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
13846 IEM_MC_PREPARE_FPU_USAGE();
13847 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13848 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
13849 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
13850 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13851 IEM_MC_ELSE()
13852 IEM_MC_IF_FCW_IM()
13853 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
13854 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
13855 IEM_MC_ENDIF();
13856 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13857 IEM_MC_ENDIF();
13858 IEM_MC_ADVANCE_RIP();
13859
13860 IEM_MC_END();
13861 return VINF_SUCCESS;
13862}
13863
13864
13865/** Opcode 0xd9 !11/3 */
13866FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
13867{
13868 IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
13869 IEM_MC_BEGIN(3, 2);
13870 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13871 IEM_MC_LOCAL(uint16_t, u16Fsw);
13872 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13873 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
13874 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
13875
13876 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13878 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13879 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13880
13881 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
13882 IEM_MC_PREPARE_FPU_USAGE();
13883 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13884 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
13885 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
13886 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13887 IEM_MC_ELSE()
13888 IEM_MC_IF_FCW_IM()
13889 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
13890 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
13891 IEM_MC_ENDIF();
13892 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13893 IEM_MC_ENDIF();
13894 IEM_MC_ADVANCE_RIP();
13895
13896 IEM_MC_END();
13897 return VINF_SUCCESS;
13898}
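
/*
 * Illustration only: the "negative QNaN" written by the masked stack
 * underflow paths above is the 32-bit real indefinite value.  Sketch of the
 * expected bit pattern (an assumption based on the IEEE-754 encoding):
 */
#if 0
# include <stdint.h>
# include <assert.h>

static void checkR32Indefinite(void)
{
    uint32_t const uIndefinite = UINT32_C(0xffc00000);
    assert(   (uIndefinite >> 31) == 1              /* sign bit set */
           && ((uIndefinite >> 23) & 0xff) == 0xff  /* all-ones exponent */
           && ((uIndefinite >> 22) & 1) == 1);      /* quiet bit set */
}
#endif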
13899
13900
13901/** Opcode 0xd9 !11/4 */
13902FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
13903{
13904 IEMOP_MNEMONIC(fldenv, "fldenv m14/m28byte");
13905 IEM_MC_BEGIN(3, 0);
13906 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
13907 IEM_MC_ARG(uint8_t, iEffSeg, 1);
13908 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
13909 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13911 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13912 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13913 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
13914 IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
13915 IEM_MC_END();
13916 return VINF_SUCCESS;
13917}
13918
13919
13920/** Opcode 0xd9 !11/5 */
13921FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
13922{
13923 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
13924 IEM_MC_BEGIN(1, 1);
13925 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13926 IEM_MC_ARG(uint16_t, u16Fcw, 0);
13927 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13929 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13930 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13931 IEM_MC_FETCH_MEM_U16(u16Fcw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13932 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fcw);
13933 IEM_MC_END();
13934 return VINF_SUCCESS;
13935}
13936
13937
13938/** Opcode 0xd9 !11/6 */
13939FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
13940{
13941 IEMOP_MNEMONIC(fnstenv, "fnstenv m14/m28byte");
13942 IEM_MC_BEGIN(3, 0);
13943 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
13944 IEM_MC_ARG(uint8_t, iEffSeg, 1);
13945 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
13946 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13948 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13949 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
13950 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
13951 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
13952 IEM_MC_END();
13953 return VINF_SUCCESS;
13954}
13955
13956
13957/** Opcode 0xd9 !11/7 */
13958FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
13959{
13960 IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
13961 IEM_MC_BEGIN(2, 0);
13962 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13963 IEM_MC_LOCAL(uint16_t, u16Fcw);
13964 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13965 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13966 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13967 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
13968 IEM_MC_FETCH_FCW(u16Fcw);
13969 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
13970 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
13971 IEM_MC_END();
13972 return VINF_SUCCESS;
13973}
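
/*
 * Illustration only: FNSTCW right after FNINIT stores 0x037f - all
 * exceptions masked, 64-bit precision, round to nearest.  GCC-style inline
 * assembly sketch for an x86 host, purely an example:
 */
#if 0
# include <stdint.h>
# include <assert.h>

static void checkInitialFcw(void)
{
    uint16_t u16Fcw;
    __asm__ __volatile__("fninit; fnstcw %0" : "=m" (u16Fcw));
    assert(u16Fcw == 0x037f);
}
#endif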
13974
13975
13976/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?. */
13977FNIEMOP_DEF(iemOp_fnop)
13978{
13979 IEMOP_MNEMONIC(fnop, "fnop");
13980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13981
13982 IEM_MC_BEGIN(0, 0);
13983 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13984 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13985 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13986 /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
13987 * an Intel optimization. Investigate. */
13988 IEM_MC_UPDATE_FPU_OPCODE_IP();
13989 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
13990 IEM_MC_END();
13991 return VINF_SUCCESS;
13992}
13993
13994
13995/** Opcode 0xd9 11/0 stN */
13996FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
13997{
13998 IEMOP_MNEMONIC(fld_stN, "fld stN");
13999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14000
14001 /** @todo Testcase: Check whether this raises \#MF. Intel does not mention
14002 * it; AMD indicates that it does. */
14003 IEM_MC_BEGIN(0, 2);
14004 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
14005 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14006 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14007 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14008
14009 IEM_MC_PREPARE_FPU_USAGE();
14010 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
14011 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
14012 IEM_MC_PUSH_FPU_RESULT(FpuRes);
14013 IEM_MC_ELSE()
14014 IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
14015 IEM_MC_ENDIF();
14016
14017 IEM_MC_ADVANCE_RIP();
14018 IEM_MC_END();
14019
14020 return VINF_SUCCESS;
14021}
14022
14023
14024/** Opcode 0xd9 11/3 stN */
14025FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
14026{
14027 IEMOP_MNEMONIC(fxch_stN, "fxch stN");
14028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14029
14030 /** @todo Testcase: Check whether this raises \#MF. Intel does not mention
14031 * it; AMD indicates that it does. */
14032 IEM_MC_BEGIN(1, 3);
14033 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
14034 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
14035 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14036 IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
14037 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14038 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14039
14040 IEM_MC_PREPARE_FPU_USAGE();
14041 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14042 IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
14043 IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
14044 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14045 IEM_MC_ELSE()
14046 IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
14047 IEM_MC_ENDIF();
14048
14049 IEM_MC_ADVANCE_RIP();
14050 IEM_MC_END();
14051
14052 return VINF_SUCCESS;
14053}
14054
14055
14056/** Opcode 0xd9 11/4, 0xdd 11/2. */
14057FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
14058{
14059 IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
14060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14061
14062 /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
14063 uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
14064 if (!iDstReg)
14065 {
14066 IEM_MC_BEGIN(0, 1);
14067 IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
14068 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14069 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14070
14071 IEM_MC_PREPARE_FPU_USAGE();
14072 IEM_MC_IF_FPUREG_NOT_EMPTY(0)
14073 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
14074 IEM_MC_ELSE()
14075 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
14076 IEM_MC_ENDIF();
14077
14078 IEM_MC_ADVANCE_RIP();
14079 IEM_MC_END();
14080 }
14081 else
14082 {
14083 IEM_MC_BEGIN(0, 2);
14084 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
14085 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14086 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14087 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14088
14089 IEM_MC_PREPARE_FPU_USAGE();
14090 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14091 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
14092 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
14093 IEM_MC_ELSE()
14094 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
14095 IEM_MC_ENDIF();
14096
14097 IEM_MC_ADVANCE_RIP();
14098 IEM_MC_END();
14099 }
14100 return VINF_SUCCESS;
14101}
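
/*
 * Illustration only: why "fstp st0" behaves as a free+pop of the top
 * register.  Conceptual sketch with hypothetical names (tag value 3 means
 * empty, TOP is the 3-bit top-of-stack field in FSW):
 */
#if 0
static void fstpSt0(unsigned *puTop, unsigned *pauTags /* 8 entries */)
{
    /* Storing ST(0) to itself changes nothing; the pop then marks the old
       top register empty and increments TOP modulo 8. */
    pauTags[*puTop] = 3;
    *puTop = (*puTop + 1) & 7;
}
#endif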
14102
14103
14104/**
14105 * Common worker for FPU instructions working on ST0, replacing it with the
14106 * result, i.e. unary operators.
14107 *
14108 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14109 */
14110FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
14111{
14112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14113
14114 IEM_MC_BEGIN(2, 1);
14115 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14116 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14117 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14118
14119 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14120 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14121 IEM_MC_PREPARE_FPU_USAGE();
14122 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14123 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
14124 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14125 IEM_MC_ELSE()
14126 IEM_MC_FPU_STACK_UNDERFLOW(0);
14127 IEM_MC_ENDIF();
14128 IEM_MC_ADVANCE_RIP();
14129
14130 IEM_MC_END();
14131 return VINF_SUCCESS;
14132}
14133
14134
14135/** Opcode 0xd9 0xe0. */
14136FNIEMOP_DEF(iemOp_fchs)
14137{
14138 IEMOP_MNEMONIC(fchs_st0, "fchs st0");
14139 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
14140}
14141
14142
14143/** Opcode 0xd9 0xe1. */
14144FNIEMOP_DEF(iemOp_fabs)
14145{
14146 IEMOP_MNEMONIC(fabs_st0, "fabs st0");
14147 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
14148}
14149
14150
14151/**
14152 * Common worker for FPU instructions working on ST0, only returning FSW.
14153 *
14154 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14155 */
14156FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
14157{
14158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14159
14160 IEM_MC_BEGIN(2, 1);
14161 IEM_MC_LOCAL(uint16_t, u16Fsw);
14162 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14163 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14164
14165 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14166 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14167 IEM_MC_PREPARE_FPU_USAGE();
14168 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14169 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
14170 IEM_MC_UPDATE_FSW(u16Fsw);
14171 IEM_MC_ELSE()
14172 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
14173 IEM_MC_ENDIF();
14174 IEM_MC_ADVANCE_RIP();
14175
14176 IEM_MC_END();
14177 return VINF_SUCCESS;
14178}
14179
14180
14181/** Opcode 0xd9 0xe4. */
14182FNIEMOP_DEF(iemOp_ftst)
14183{
14184 IEMOP_MNEMONIC(ftst_st0, "ftst st0");
14185 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
14186}
14187
14188
14189/** Opcode 0xd9 0xe5. */
14190FNIEMOP_DEF(iemOp_fxam)
14191{
14192 IEMOP_MNEMONIC(fxam_st0, "fxam st0");
14193 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
14194}
14195
14196
14197/**
14198 * Common worker for FPU instructions pushing a constant onto the FPU stack.
14199 *
14200 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14201 */
14202FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
14203{
14204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14205
14206 IEM_MC_BEGIN(1, 1);
14207 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14208 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14209
14210 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14211 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14212 IEM_MC_PREPARE_FPU_USAGE();
14213 IEM_MC_IF_FPUREG_IS_EMPTY(7)
14214 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
14215 IEM_MC_PUSH_FPU_RESULT(FpuRes);
14216 IEM_MC_ELSE()
14217 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
14218 IEM_MC_ENDIF();
14219 IEM_MC_ADVANCE_RIP();
14220
14221 IEM_MC_END();
14222 return VINF_SUCCESS;
14223}
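
/*
 * Illustration only: the IEM_MC_IF_FPUREG_IS_EMPTY(7) check above (and in
 * FLD/FILD) tests the register a push is about to claim - pushing decrements
 * TOP, so the current ST(7) becomes the new ST(0).  Sketch:
 */
#if 0
static unsigned fpuPushTarget(unsigned uTop)
{
    return (uTop + 7) & 7;  /* physical register that must be empty; same as (uTop - 1) & 7 */
}
#endif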
14224
14225
14226/** Opcode 0xd9 0xe8. */
14227FNIEMOP_DEF(iemOp_fld1)
14228{
14229 IEMOP_MNEMONIC(fld1, "fld1");
14230 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
14231}
14232
14233
14234/** Opcode 0xd9 0xe9. */
14235FNIEMOP_DEF(iemOp_fldl2t)
14236{
14237 IEMOP_MNEMONIC(fldl2t, "fldl2t");
14238 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
14239}
14240
14241
14242/** Opcode 0xd9 0xea. */
14243FNIEMOP_DEF(iemOp_fldl2e)
14244{
14245 IEMOP_MNEMONIC(fldl2e, "fldl2e");
14246 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
14247}
14248
14249/** Opcode 0xd9 0xeb. */
14250FNIEMOP_DEF(iemOp_fldpi)
14251{
14252 IEMOP_MNEMONIC(fldpi, "fldpi");
14253 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
14254}
14255
14256
14257/** Opcode 0xd9 0xec. */
14258FNIEMOP_DEF(iemOp_fldlg2)
14259{
14260 IEMOP_MNEMONIC(fldlg2, "fldlg2");
14261 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
14262}
14263
14264/** Opcode 0xd9 0xed. */
14265FNIEMOP_DEF(iemOp_fldln2)
14266{
14267 IEMOP_MNEMONIC(fldln2, "fldln2");
14268 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
14269}
14270
14271
14272/** Opcode 0xd9 0xee. */
14273FNIEMOP_DEF(iemOp_fldz)
14274{
14275 IEMOP_MNEMONIC(fldz, "fldz");
14276 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
14277}
14278
14279
14280/** Opcode 0xd9 0xf0. */
14281FNIEMOP_DEF(iemOp_f2xm1)
14282{
14283 IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
14284 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
14285}
14286
14287
14288/** Opcode 0xd9 0xf1. */
14289FNIEMOP_DEF(iemOp_fyl2x)
14290{
14291 IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st0");
14292 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fyl2x_r80);
14293}
14294
14295
14296/**
14297 * Common worker for FPU instructions working on ST0 and having two outputs, one
14298 * replacing ST0 and one pushed onto the stack.
14299 *
14300 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14301 */
14302FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
14303{
14304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14305
14306 IEM_MC_BEGIN(2, 1);
14307 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
14308 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
14309 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14310
14311 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14312 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14313 IEM_MC_PREPARE_FPU_USAGE();
14314 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14315 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
14316 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
14317 IEM_MC_ELSE()
14318 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
14319 IEM_MC_ENDIF();
14320 IEM_MC_ADVANCE_RIP();
14321
14322 IEM_MC_END();
14323 return VINF_SUCCESS;
14324}
14325
14326
14327/** Opcode 0xd9 0xf2. */
14328FNIEMOP_DEF(iemOp_fptan)
14329{
14330 IEMOP_MNEMONIC(fptan_st0, "fptan st0");
14331 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
14332}
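
/*
 * Illustration only: the two-output worker above serves FPTAN, FXTRACT and
 * FSINCOS.  For FPTAN the first result replaces ST0 and a constant 1.0 is
 * pushed on top.  Host-side sketch (ignoring range reduction limits):
 */
#if 0
# include <math.h>

static void fptanLike(double *prdNewSt1, double *prdNewSt0, double rdOldSt0)
{
    *prdNewSt1 = tan(rdOldSt0);  /* partial tangent replaces the old ST0... */
    *prdNewSt0 = 1.0;            /* ...and 1.0 is pushed on top of it */
}
#endif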
14333
14334
14335/**
14336 * Common worker for FPU instructions working on STn and ST0, storing the result
14337 * in STn, and popping the stack unless IE, DE or ZE was raised.
14338 *
14339 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14340 */
14341FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
14342{
14343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14344
14345 IEM_MC_BEGIN(3, 1);
14346 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14347 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14348 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14349 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14350
14351 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14352 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14353
14354 IEM_MC_PREPARE_FPU_USAGE();
14355 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
14356 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
14357 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
14358 IEM_MC_ELSE()
14359 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
14360 IEM_MC_ENDIF();
14361 IEM_MC_ADVANCE_RIP();
14362
14363 IEM_MC_END();
14364 return VINF_SUCCESS;
14365}
14366
14367
14368/** Opcode 0xd9 0xf3. */
14369FNIEMOP_DEF(iemOp_fpatan)
14370{
14371 IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
14372 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
14373}
14374
14375
14376/** Opcode 0xd9 0xf4. */
14377FNIEMOP_DEF(iemOp_fxtract)
14378{
14379 IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
14380 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
14381}
14382
14383
14384/** Opcode 0xd9 0xf5. */
14385FNIEMOP_DEF(iemOp_fprem1)
14386{
14387 IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
14388 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
14389}
14390
14391
14392/** Opcode 0xd9 0xf6. */
14393FNIEMOP_DEF(iemOp_fdecstp)
14394{
14395 IEMOP_MNEMONIC(fdecstp, "fdecstp");
14396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14397 /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
14398 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
14399 * FINCSTP and FDECSTP. */
14400
14401 IEM_MC_BEGIN(0,0);
14402
14403 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14404 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14405
14406 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14407 IEM_MC_FPU_STACK_DEC_TOP();
14408 IEM_MC_UPDATE_FSW_CONST(0);
14409
14410 IEM_MC_ADVANCE_RIP();
14411 IEM_MC_END();
14412 return VINF_SUCCESS;
14413}
14414
14415
14416/** Opcode 0xd9 0xf7. */
14417FNIEMOP_DEF(iemOp_fincstp)
14418{
14419 IEMOP_MNEMONIC(fincstp, "fincstp");
14420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14421 /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
14422 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
14423 * FINCSTP and FDECSTP. */
14424
14425 IEM_MC_BEGIN(0,0);
14426
14427 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14428 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14429
14430 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14431 IEM_MC_FPU_STACK_INC_TOP();
14432 IEM_MC_UPDATE_FSW_CONST(0);
14433
14434 IEM_MC_ADVANCE_RIP();
14435 IEM_MC_END();
14436 return VINF_SUCCESS;
14437}
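
/*
 * Illustration only: FDECSTP and FINCSTP merely rotate the 3-bit TOP field;
 * tags and register contents are untouched.  Sketch:
 */
#if 0
static unsigned fpuTopAfterFdecstp(unsigned uTop) { return (uTop + 7) & 7; }
static unsigned fpuTopAfterFincstp(unsigned uTop) { return (uTop + 1) & 7; }
#endif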
14438
14439
14440/** Opcode 0xd9 0xf8. */
14441FNIEMOP_DEF(iemOp_fprem)
14442{
14443 IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
14444 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
14445}
14446
14447
14448/** Opcode 0xd9 0xf9. */
14449FNIEMOP_DEF(iemOp_fyl2xp1)
14450{
14451 IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
14452 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
14453}
14454
14455
14456/** Opcode 0xd9 0xfa. */
14457FNIEMOP_DEF(iemOp_fsqrt)
14458{
14459 IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
14460 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
14461}
14462
14463
14464/** Opcode 0xd9 0xfb. */
14465FNIEMOP_DEF(iemOp_fsincos)
14466{
14467 IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
14468 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
14469}
14470
14471
14472/** Opcode 0xd9 0xfc. */
14473FNIEMOP_DEF(iemOp_frndint)
14474{
14475 IEMOP_MNEMONIC(frndint_st0, "frndint st0");
14476 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
14477}
14478
14479
14480/** Opcode 0xd9 0xfd. */
14481FNIEMOP_DEF(iemOp_fscale)
14482{
14483 IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
14484 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
14485}
14486
14487
14488/** Opcode 0xd9 0xfe. */
14489FNIEMOP_DEF(iemOp_fsin)
14490{
14491 IEMOP_MNEMONIC(fsin_st0, "fsin st0");
14492 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
14493}
14494
14495
14496/** Opcode 0xd9 0xff. */
14497FNIEMOP_DEF(iemOp_fcos)
14498{
14499 IEMOP_MNEMONIC(fcos_st0, "fcos st0");
14500 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
14501}
14502
14503
14504/** Used by iemOp_EscF1. */
14505IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
14506{
14507 /* 0xe0 */ iemOp_fchs,
14508 /* 0xe1 */ iemOp_fabs,
14509 /* 0xe2 */ iemOp_Invalid,
14510 /* 0xe3 */ iemOp_Invalid,
14511 /* 0xe4 */ iemOp_ftst,
14512 /* 0xe5 */ iemOp_fxam,
14513 /* 0xe6 */ iemOp_Invalid,
14514 /* 0xe7 */ iemOp_Invalid,
14515 /* 0xe8 */ iemOp_fld1,
14516 /* 0xe9 */ iemOp_fldl2t,
14517 /* 0xea */ iemOp_fldl2e,
14518 /* 0xeb */ iemOp_fldpi,
14519 /* 0xec */ iemOp_fldlg2,
14520 /* 0xed */ iemOp_fldln2,
14521 /* 0xee */ iemOp_fldz,
14522 /* 0xef */ iemOp_Invalid,
14523 /* 0xf0 */ iemOp_f2xm1,
14524 /* 0xf1 */ iemOp_fyl2x,
14525 /* 0xf2 */ iemOp_fptan,
14526 /* 0xf3 */ iemOp_fpatan,
14527 /* 0xf4 */ iemOp_fxtract,
14528 /* 0xf5 */ iemOp_fprem1,
14529 /* 0xf6 */ iemOp_fdecstp,
14530 /* 0xf7 */ iemOp_fincstp,
14531 /* 0xf8 */ iemOp_fprem,
14532 /* 0xf9 */ iemOp_fyl2xp1,
14533 /* 0xfa */ iemOp_fsqrt,
14534 /* 0xfb */ iemOp_fsincos,
14535 /* 0xfc */ iemOp_frndint,
14536 /* 0xfd */ iemOp_fscale,
14537 /* 0xfe */ iemOp_fsin,
14538 /* 0xff */ iemOp_fcos
14539};
14540
14541
14542/** Opcode 0xd9. */
14543FNIEMOP_DEF(iemOp_EscF1)
14544{
14545 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14546 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
14547
14548 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14549 {
14550 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14551 {
14552 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
14553 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
14554 case 2:
14555 if (bRm == 0xd0)
14556 return FNIEMOP_CALL(iemOp_fnop);
14557 return IEMOP_RAISE_INVALID_OPCODE();
14558 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
14559 case 4:
14560 case 5:
14561 case 6:
14562 case 7:
14563 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
14564 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
14565 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14566 }
14567 }
14568 else
14569 {
14570 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14571 {
14572 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
14573 case 1: return IEMOP_RAISE_INVALID_OPCODE();
14574 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
14575 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
14576 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
14577 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
14578 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
14579 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
14580 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14581 }
14582 }
14583}
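
/*
 * Illustration only: for reg fields 4..7 with mod == 3 the dispatch above
 * indexes g_apfnEscF1_E0toFF directly with bRm - 0xe0, which is safe because
 * those conditions imply 0xe0 <= bRm <= 0xff.  Hypothetical sanity check:
 */
#if 0
# include <assert.h>

static void checkEscF1Index(void)
{
    unsigned char const bRm = 0xfe;                     /* d9 fe = fsin */
    assert((bRm & 0xc0) == 0xc0 && ((bRm >> 3) & 7) >= 4);
    assert(bRm - 0xe0 == 30);                           /* g_apfnEscF1_E0toFF[30] */
}
#endif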
14584
14585
14586/** Opcode 0xda 11/0. */
14587FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
14588{
14589 IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
14590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14591
14592 IEM_MC_BEGIN(0, 1);
14593 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14594
14595 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14596 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14597
14598 IEM_MC_PREPARE_FPU_USAGE();
14599 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14600 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
14601 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14602 IEM_MC_ENDIF();
14603 IEM_MC_UPDATE_FPU_OPCODE_IP();
14604 IEM_MC_ELSE()
14605 IEM_MC_FPU_STACK_UNDERFLOW(0);
14606 IEM_MC_ENDIF();
14607 IEM_MC_ADVANCE_RIP();
14608
14609 IEM_MC_END();
14610 return VINF_SUCCESS;
14611}
14612
14613
14614/** Opcode 0xda 11/1. */
14615FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
14616{
14617 IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
14618 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14619
14620 IEM_MC_BEGIN(0, 1);
14621 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14622
14623 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14624 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14625
14626 IEM_MC_PREPARE_FPU_USAGE();
14627 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14628 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
14629 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14630 IEM_MC_ENDIF();
14631 IEM_MC_UPDATE_FPU_OPCODE_IP();
14632 IEM_MC_ELSE()
14633 IEM_MC_FPU_STACK_UNDERFLOW(0);
14634 IEM_MC_ENDIF();
14635 IEM_MC_ADVANCE_RIP();
14636
14637 IEM_MC_END();
14638 return VINF_SUCCESS;
14639}
14640
14641
14642/** Opcode 0xda 11/2. */
14643FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
14644{
14645 IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
14646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14647
14648 IEM_MC_BEGIN(0, 1);
14649 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14650
14651 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14652 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14653
14654 IEM_MC_PREPARE_FPU_USAGE();
14655 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14656 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
14657 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14658 IEM_MC_ENDIF();
14659 IEM_MC_UPDATE_FPU_OPCODE_IP();
14660 IEM_MC_ELSE()
14661 IEM_MC_FPU_STACK_UNDERFLOW(0);
14662 IEM_MC_ENDIF();
14663 IEM_MC_ADVANCE_RIP();
14664
14665 IEM_MC_END();
14666 return VINF_SUCCESS;
14667}
14668
14669
14670/** Opcode 0xda 11/3. */
14671FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
14672{
14673 IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
14674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14675
14676 IEM_MC_BEGIN(0, 1);
14677 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14678
14679 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14680 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14681
14682 IEM_MC_PREPARE_FPU_USAGE();
14683 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14684 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
14685 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14686 IEM_MC_ENDIF();
14687 IEM_MC_UPDATE_FPU_OPCODE_IP();
14688 IEM_MC_ELSE()
14689 IEM_MC_FPU_STACK_UNDERFLOW(0);
14690 IEM_MC_ENDIF();
14691 IEM_MC_ADVANCE_RIP();
14692
14693 IEM_MC_END();
14694 return VINF_SUCCESS;
14695}
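
/*
 * Illustration only: the four 0xda FCMOVcc forms above test the same EFLAGS
 * conditions as the corresponding CMOVcc instructions.  Hypothetical helper
 * summarizing the mapping:
 */
#if 0
static bool fcmovDaConditionMet(uint32_t fEfl, unsigned iReg)
{
    switch (iReg & 3)
    {
        case 0:  return RT_BOOL(fEfl & X86_EFL_CF);                 /* fcmovb:  below */
        case 1:  return RT_BOOL(fEfl & X86_EFL_ZF);                 /* fcmove:  equal */
        case 2:  return RT_BOOL(fEfl & (X86_EFL_CF | X86_EFL_ZF));  /* fcmovbe: below or equal */
        default: return RT_BOOL(fEfl & X86_EFL_PF);                 /* fcmovu:  unordered */
    }
}
#endif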
14696
14697
14698/**
14699 * Common worker for FPU instructions working on ST0 and STn, only affecting
14700 * flags, and popping twice when done.
14701 *
14702 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14703 */
14704FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14705{
14706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14707
14708 IEM_MC_BEGIN(3, 1);
14709 IEM_MC_LOCAL(uint16_t, u16Fsw);
14710 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14711 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14712 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14713
14714 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14715 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14716
14717 IEM_MC_PREPARE_FPU_USAGE();
14718 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
14719 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14720 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
14721 IEM_MC_ELSE()
14722 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
14723 IEM_MC_ENDIF();
14724 IEM_MC_ADVANCE_RIP();
14725
14726 IEM_MC_END();
14727 return VINF_SUCCESS;
14728}
14729
14730
14731/** Opcode 0xda 0xe9. */
14732FNIEMOP_DEF(iemOp_fucompp)
14733{
14734 IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
14735 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
14736}
14737
14738
14739/**
14740 * Common worker for FPU instructions working on ST0 and an m32i, and storing
14741 * the result in ST0.
14742 *
14743 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14744 */
14745FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
14746{
14747 IEM_MC_BEGIN(3, 3);
14748 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14749 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14750 IEM_MC_LOCAL(int32_t, i32Val2);
14751 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14752 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14753 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
14754
14755 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14757
14758 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14759 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14760 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14761
14762 IEM_MC_PREPARE_FPU_USAGE();
14763 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14764 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
14765 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14766 IEM_MC_ELSE()
14767 IEM_MC_FPU_STACK_UNDERFLOW(0);
14768 IEM_MC_ENDIF();
14769 IEM_MC_ADVANCE_RIP();
14770
14771 IEM_MC_END();
14772 return VINF_SUCCESS;
14773}
14774
14775
14776/** Opcode 0xda !11/0. */
14777FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
14778{
14779 IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
14780 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
14781}
14782
14783
14784/** Opcode 0xda !11/1. */
14785FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
14786{
14787 IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
14788 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
14789}
14790
14791
14792/** Opcode 0xda !11/2. */
14793FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
14794{
14795 IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");
14796
14797 IEM_MC_BEGIN(3, 3);
14798 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14799 IEM_MC_LOCAL(uint16_t, u16Fsw);
14800 IEM_MC_LOCAL(int32_t, i32Val2);
14801 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14802 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14803 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
14804
14805 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14807
14808 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14809 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14810 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14811
14812 IEM_MC_PREPARE_FPU_USAGE();
14813 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14814 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
14815 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14816 IEM_MC_ELSE()
14817 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14818 IEM_MC_ENDIF();
14819 IEM_MC_ADVANCE_RIP();
14820
14821 IEM_MC_END();
14822 return VINF_SUCCESS;
14823}
14824
14825
14826/** Opcode 0xda !11/3. */
14827FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
14828{
14829 IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");
14830
14831 IEM_MC_BEGIN(3, 3);
14832 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14833 IEM_MC_LOCAL(uint16_t, u16Fsw);
14834 IEM_MC_LOCAL(int32_t, i32Val2);
14835 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14836 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14837 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
14838
14839 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14841
14842 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14843 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14844 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14845
14846 IEM_MC_PREPARE_FPU_USAGE();
14847 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14848 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
14849 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14850 IEM_MC_ELSE()
14851 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14852 IEM_MC_ENDIF();
14853 IEM_MC_ADVANCE_RIP();
14854
14855 IEM_MC_END();
14856 return VINF_SUCCESS;
14857}
14858
14859
14860/** Opcode 0xda !11/4. */
14861FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
14862{
14863 IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
14864 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
14865}
14866
14867
14868/** Opcode 0xda !11/5. */
14869FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
14870{
14871 IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
14872 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
14873}
14874
14875
14876/** Opcode 0xda !11/6. */
14877FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
14878{
14879 IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
14880 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
14881}
14882
14883
14884/** Opcode 0xda !11/7. */
14885FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
14886{
14887 IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
14888 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
14889}
14890
14891
14892/** Opcode 0xda. */
14893FNIEMOP_DEF(iemOp_EscF2)
14894{
14895 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14896 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
14897 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14898 {
14899 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14900 {
14901 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
14902 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
14903 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
14904 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
14905 case 4: return IEMOP_RAISE_INVALID_OPCODE();
14906 case 5:
14907 if (bRm == 0xe9)
14908 return FNIEMOP_CALL(iemOp_fucompp);
14909 return IEMOP_RAISE_INVALID_OPCODE();
14910 case 6: return IEMOP_RAISE_INVALID_OPCODE();
14911 case 7: return IEMOP_RAISE_INVALID_OPCODE();
14912 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14913 }
14914 }
14915 else
14916 {
14917 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14918 {
14919 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
14920 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
14921 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
14922 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
14923 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
14924 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
14925 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
14926 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
14927 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14928 }
14929 }
14930}
14931
14932
14933/** Opcode 0xdb !11/0. */
14934FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
14935{
14936 IEMOP_MNEMONIC(fild_m32i, "fild m32i");
14937
14938 IEM_MC_BEGIN(2, 3);
14939 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14940 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14941 IEM_MC_LOCAL(int32_t, i32Val);
14942 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14943 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);
14944
14945 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14946 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14947
14948 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14949 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14950 IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14951
14952 IEM_MC_PREPARE_FPU_USAGE();
14953 IEM_MC_IF_FPUREG_IS_EMPTY(7)
14954 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
14955 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14956 IEM_MC_ELSE()
14957 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14958 IEM_MC_ENDIF();
14959 IEM_MC_ADVANCE_RIP();
14960
14961 IEM_MC_END();
14962 return VINF_SUCCESS;
14963}
14964
14965
14966/** Opcode 0xdb !11/1. */
14967FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
14968{
14969 IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
14970 IEM_MC_BEGIN(3, 2);
14971 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14972 IEM_MC_LOCAL(uint16_t, u16Fsw);
14973 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14974 IEM_MC_ARG(int32_t *, pi32Dst, 1);
14975 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14976
14977 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14978 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14979 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14980 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14981
14982 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
14983 IEM_MC_PREPARE_FPU_USAGE();
14984 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14985 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
14986 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14987 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14988 IEM_MC_ELSE()
14989 IEM_MC_IF_FCW_IM()
14990 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
14991 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
14992 IEM_MC_ENDIF();
14993 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14994 IEM_MC_ENDIF();
14995 IEM_MC_ADVANCE_RIP();
14996
14997 IEM_MC_END();
14998 return VINF_SUCCESS;
14999}
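
/*
 * Illustration only: unlike FIST/FISTP, FISTTP (introduced with SSE3) always
 * truncates toward zero, ignoring the FCW rounding control.  Host-side
 * sketch, leaving out the out-of-range / #IS handling:
 */
#if 0
static int fisttpLike(double rdSt0)
{
    return (int)rdSt0;  /* C casts truncate, matching FISTTP; FIST honours RC */
}
#endif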
15000
15001
15002/** Opcode 0xdb !11/2. */
15003FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
15004{
15005 IEMOP_MNEMONIC(fist_m32i, "fist m32i");
15006 IEM_MC_BEGIN(3, 2);
15007 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15008 IEM_MC_LOCAL(uint16_t, u16Fsw);
15009 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15010 IEM_MC_ARG(int32_t *, pi32Dst, 1);
15011 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15012
15013 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15015 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15016 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15017
15018 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15019 IEM_MC_PREPARE_FPU_USAGE();
15020 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15021 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
15022 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
15023 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15024 IEM_MC_ELSE()
15025 IEM_MC_IF_FCW_IM()
15026 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
15027 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
15028 IEM_MC_ENDIF();
15029 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15030 IEM_MC_ENDIF();
15031 IEM_MC_ADVANCE_RIP();
15032
15033 IEM_MC_END();
15034 return VINF_SUCCESS;
15035}
15036
15037
15038/** Opcode 0xdb !11/3. */
15039FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
15040{
15041 IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
15042 IEM_MC_BEGIN(3, 2);
15043 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15044 IEM_MC_LOCAL(uint16_t, u16Fsw);
15045 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15046 IEM_MC_ARG(int32_t *, pi32Dst, 1);
15047 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15048
15049 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15051 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15052 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15053
15054 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15055 IEM_MC_PREPARE_FPU_USAGE();
15056 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15057 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
15058 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
15059 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15060 IEM_MC_ELSE()
15061 IEM_MC_IF_FCW_IM()
15062 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
15063 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
15064 IEM_MC_ENDIF();
15065 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15066 IEM_MC_ENDIF();
15067 IEM_MC_ADVANCE_RIP();
15068
15069 IEM_MC_END();
15070 return VINF_SUCCESS;
15071}
15072
15073
15074/** Opcode 0xdb !11/5. */
15075FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
15076{
15077 IEMOP_MNEMONIC(fld_m80r, "fld m80r");
15078
15079 IEM_MC_BEGIN(2, 3);
15080 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15081 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15082 IEM_MC_LOCAL(RTFLOAT80U, r80Val);
15083 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15084 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);
15085
15086 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15088
15089 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15090 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15091 IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15092
15093 IEM_MC_PREPARE_FPU_USAGE();
15094 IEM_MC_IF_FPUREG_IS_EMPTY(7)
15095 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
15096 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15097 IEM_MC_ELSE()
15098 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15099 IEM_MC_ENDIF();
15100 IEM_MC_ADVANCE_RIP();
15101
15102 IEM_MC_END();
15103 return VINF_SUCCESS;
15104}
15105
15106
15107/** Opcode 0xdb !11/7. */
15108FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
15109{
15110 IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
15111 IEM_MC_BEGIN(3, 2);
15112 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15113 IEM_MC_LOCAL(uint16_t, u16Fsw);
15114 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15115 IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
15116 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15117
15118 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15120 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15121 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15122
15123 IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15124 IEM_MC_PREPARE_FPU_USAGE();
15125 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15126 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
15127 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
15128 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15129 IEM_MC_ELSE()
15130 IEM_MC_IF_FCW_IM()
15131 IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
15132 IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
15133 IEM_MC_ENDIF();
15134 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15135 IEM_MC_ENDIF();
15136 IEM_MC_ADVANCE_RIP();
15137
15138 IEM_MC_END();
15139 return VINF_SUCCESS;
15140}
15141
15142
15143/** Opcode 0xdb 11/0. */
15144FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
15145{
15146 IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
15147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15148
15149 IEM_MC_BEGIN(0, 1);
15150 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15151
15152 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15153 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15154
15155 IEM_MC_PREPARE_FPU_USAGE();
15156 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15157 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
15158 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15159 IEM_MC_ENDIF();
15160 IEM_MC_UPDATE_FPU_OPCODE_IP();
15161 IEM_MC_ELSE()
15162 IEM_MC_FPU_STACK_UNDERFLOW(0);
15163 IEM_MC_ENDIF();
15164 IEM_MC_ADVANCE_RIP();
15165
15166 IEM_MC_END();
15167 return VINF_SUCCESS;
15168}
15169
15170
15171/** Opcode 0xdb 11/1. */
15172FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
15173{
15174 IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
15175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15176
15177 IEM_MC_BEGIN(0, 1);
15178 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15179
15180 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15181 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15182
15183 IEM_MC_PREPARE_FPU_USAGE();
15184 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15185 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
15186 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15187 IEM_MC_ENDIF();
15188 IEM_MC_UPDATE_FPU_OPCODE_IP();
15189 IEM_MC_ELSE()
15190 IEM_MC_FPU_STACK_UNDERFLOW(0);
15191 IEM_MC_ENDIF();
15192 IEM_MC_ADVANCE_RIP();
15193
15194 IEM_MC_END();
15195 return VINF_SUCCESS;
15196}
15197
15198
15199/** Opcode 0xdb 11/2. */
15200FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
15201{
15202 IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
15203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15204
15205 IEM_MC_BEGIN(0, 1);
15206 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15207
15208 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15209 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15210
15211 IEM_MC_PREPARE_FPU_USAGE();
15212 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15213 IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
15214 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15215 IEM_MC_ENDIF();
15216 IEM_MC_UPDATE_FPU_OPCODE_IP();
15217 IEM_MC_ELSE()
15218 IEM_MC_FPU_STACK_UNDERFLOW(0);
15219 IEM_MC_ENDIF();
15220 IEM_MC_ADVANCE_RIP();
15221
15222 IEM_MC_END();
15223 return VINF_SUCCESS;
15224}
15225
15226
15227/** Opcode 0xdb 11/3. */
15228FNIEMOP_DEF_1(iemOp_fcmovnu_stN, uint8_t, bRm)
15229{
15230 IEMOP_MNEMONIC(fcmovnu_st0_stN, "fcmovnu st0,stN");
15231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15232
15233 IEM_MC_BEGIN(0, 1);
15234 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15235
15236 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15237 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15238
15239 IEM_MC_PREPARE_FPU_USAGE();
15240 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15241 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
15242 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15243 IEM_MC_ENDIF();
15244 IEM_MC_UPDATE_FPU_OPCODE_IP();
15245 IEM_MC_ELSE()
15246 IEM_MC_FPU_STACK_UNDERFLOW(0);
15247 IEM_MC_ENDIF();
15248 IEM_MC_ADVANCE_RIP();
15249
15250 IEM_MC_END();
15251 return VINF_SUCCESS;
15252}
15253
15254
15255/** Opcode 0xdb 0xe0. */
15256FNIEMOP_DEF(iemOp_fneni)
15257{
15258 IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
15259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15260 IEM_MC_BEGIN(0,0);
15261 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15262 IEM_MC_ADVANCE_RIP();
15263 IEM_MC_END();
15264 return VINF_SUCCESS;
15265}
15266
15267
15268/** Opcode 0xdb 0xe1. */
15269FNIEMOP_DEF(iemOp_fndisi)
15270{
15271 IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
15272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15273 IEM_MC_BEGIN(0,0);
15274 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15275 IEM_MC_ADVANCE_RIP();
15276 IEM_MC_END();
15277 return VINF_SUCCESS;
15278}
15279
15280
15281/** Opcode 0xdb 0xe2. */
15282FNIEMOP_DEF(iemOp_fnclex)
15283{
15284 IEMOP_MNEMONIC(fnclex, "fnclex");
15285 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15286
15287 IEM_MC_BEGIN(0,0);
15288 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15289 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15290 IEM_MC_CLEAR_FSW_EX();
15291 IEM_MC_ADVANCE_RIP();
15292 IEM_MC_END();
15293 return VINF_SUCCESS;
15294}
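
/*
 * Illustration only: FNCLEX clears the exception flags, the stack fault
 * flag, ES and B, while keeping C0-C3 and TOP.  Rough sketch of the FSW
 * update performed by IEM_MC_CLEAR_FSW_EX:
 */
#if 0
static unsigned short fnclexFsw(unsigned short u16Fsw)
{
    return u16Fsw & 0x7f00;  /* keep C0-C3 (0x4700) and TOP (0x3800) only */
}
#endif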
15295
15296
15297/** Opcode 0xdb 0xe3. */
15298FNIEMOP_DEF(iemOp_fninit)
15299{
15300 IEMOP_MNEMONIC(fninit, "fninit");
15301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15302 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
15303}
15304
15305
15306/** Opcode 0xdb 0xe4. */
15307FNIEMOP_DEF(iemOp_fnsetpm)
15308{
15309 IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
15310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15311 IEM_MC_BEGIN(0,0);
15312 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15313 IEM_MC_ADVANCE_RIP();
15314 IEM_MC_END();
15315 return VINF_SUCCESS;
15316}
15317
15318
15319/** Opcode 0xdb 0xe5. */
15320FNIEMOP_DEF(iemOp_frstpm)
15321{
15322 IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
15323#if 0 /* #UDs on newer CPUs */
15324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15325 IEM_MC_BEGIN(0,0);
15326 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15327 IEM_MC_ADVANCE_RIP();
15328 IEM_MC_END();
15329 return VINF_SUCCESS;
15330#else
15331 return IEMOP_RAISE_INVALID_OPCODE();
15332#endif
15333}
15334
15335
15336/** Opcode 0xdb 11/5. */
15337FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
15338{
15339 IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
15340 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
15341}
15342
15343
15344/** Opcode 0xdb 11/6. */
15345FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
15346{
15347 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
15348 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
15349}
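
/*
 * Illustration only: FCOMI/FUCOMI report the comparison in EFLAGS instead of
 * the FSW condition codes.  Hypothetical helper returning the ZF/PF/CF bits
 * (0x40/0x04/0x01):
 */
#if 0
static unsigned fcomiEflags(double rdSt0, double rdStN)
{
    if (rdSt0 != rdSt0 || rdStN != rdStN)
        return 0x45;    /* unordered: ZF=1, PF=1, CF=1 */
    if (rdSt0 > rdStN)
        return 0x00;    /* greater:   all three clear */
    if (rdSt0 < rdStN)
        return 0x01;    /* less:      CF=1 */
    return 0x40;        /* equal:     ZF=1 */
}
#endif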
15350
15351
15352/** Opcode 0xdb. */
15353FNIEMOP_DEF(iemOp_EscF3)
15354{
15355 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15356 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
15357 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15358 {
15359 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15360 {
15361 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
15362 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
15363 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
15364 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnu_stN, bRm);
15365 case 4:
15366 switch (bRm)
15367 {
15368 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
15369 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
15370 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
15371 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
15372 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
15373 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
15374 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
15375 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
15376 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15377 }
15378 break;
15379 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
15380 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
15381 case 7: return IEMOP_RAISE_INVALID_OPCODE();
15382 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15383 }
15384 }
15385 else
15386 {
15387 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15388 {
15389 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
15390 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i, bRm);
15391 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
15392 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
15393 case 4: return IEMOP_RAISE_INVALID_OPCODE();
15394 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
15395 case 6: return IEMOP_RAISE_INVALID_OPCODE();
15396 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
15397 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15398 }
15399 }
15400}
15401
15402
15403/**
15404 * Common worker for FPU instructions working on STn and ST0, and storing the
15405 * result in STn unless IE, DE or ZE was raised.
15406 *
15407 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15408 */
15409FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
15410{
15411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15412
15413 IEM_MC_BEGIN(3, 1);
15414 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15415 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15416 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15417 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
15418
15419 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15420 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15421
15422 IEM_MC_PREPARE_FPU_USAGE();
15423 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
15424 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
15425 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
15426 IEM_MC_ELSE()
15427 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
15428 IEM_MC_ENDIF();
15429 IEM_MC_ADVANCE_RIP();
15430
15431 IEM_MC_END();
15432 return VINF_SUCCESS;
15433}
15434
15435
15436/** Opcode 0xdc 11/0. */
15437FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
15438{
15439 IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
15440 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
15441}
15442
15443
15444/** Opcode 0xdc 11/1. */
15445FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
15446{
15447 IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
15448 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
15449}
15450
15451
15452/** Opcode 0xdc 11/4. */
15453FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
15454{
15455 IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
15456 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
15457}
15458
15459
15460/** Opcode 0xdc 11/5. */
15461FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
15462{
15463 IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
15464 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
15465}
15466
15467
15468/** Opcode 0xdc 11/6. */
15469FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
15470{
15471 IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
15472 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
15473}
15474
15475
15476/** Opcode 0xdc 11/7. */
15477FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
15478{
15479 IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
15480 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
15481}
15482
15483
15484/**
15485 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
15486 * memory operand, and storing the result in ST0.
15487 *
15488 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15489 */
15490FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnAImpl)
15491{
15492 IEM_MC_BEGIN(3, 3);
15493 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15494 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15495 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
15496 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15497 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
15498 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
15499
15500 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15502 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15503 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15504
15505 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15506 IEM_MC_PREPARE_FPU_USAGE();
15507 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
15508 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Factor1, pr64Factor2);
15509 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15510 IEM_MC_ELSE()
15511 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15512 IEM_MC_ENDIF();
15513 IEM_MC_ADVANCE_RIP();
15514
15515 IEM_MC_END();
15516 return VINF_SUCCESS;
15517}
15518
15519
15520/** Opcode 0xdc !11/0. */
15521FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
15522{
15523 IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
15524 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
15525}
15526
15527
15528/** Opcode 0xdc !11/1. */
15529FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
15530{
15531 IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
15532 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
15533}
15534
15535
15536/** Opcode 0xdc !11/2. */
15537FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
15538{
15539 IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");
15540
15541 IEM_MC_BEGIN(3, 3);
15542 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15543 IEM_MC_LOCAL(uint16_t, u16Fsw);
15544 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
15545 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15546 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15547 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
15548
15549 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15551
15552 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15553 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15554 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15555
15556 IEM_MC_PREPARE_FPU_USAGE();
15557 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15558 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
15559 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15560 IEM_MC_ELSE()
15561 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15562 IEM_MC_ENDIF();
15563 IEM_MC_ADVANCE_RIP();
15564
15565 IEM_MC_END();
15566 return VINF_SUCCESS;
15567}
15568
15569
15570/** Opcode 0xdc !11/3. */
15571FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
15572{
15573 IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");
15574
15575 IEM_MC_BEGIN(3, 3);
15576 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15577 IEM_MC_LOCAL(uint16_t, u16Fsw);
15578 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
15579 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15580 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15581 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
15582
15583 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15585
15586 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15587 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15588 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15589
15590 IEM_MC_PREPARE_FPU_USAGE();
15591 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15592 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
15593 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15594 IEM_MC_ELSE()
15595 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15596 IEM_MC_ENDIF();
15597 IEM_MC_ADVANCE_RIP();
15598
15599 IEM_MC_END();
15600 return VINF_SUCCESS;
15601}
15602
15603
15604/** Opcode 0xdc !11/4. */
15605FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
15606{
15607 IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
15608 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
15609}
15610
15611
15612/** Opcode 0xdc !11/5. */
15613FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
15614{
15615 IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
15616 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
15617}
15618
15619
15620/** Opcode 0xdc !11/6. */
15621FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
15622{
15623 IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
15624 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
15625}
15626
15627
15628/** Opcode 0xdc !11/7. */
15629FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
15630{
15631 IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
15632 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
15633}
15634
15635
15636/** Opcode 0xdc. */
15637FNIEMOP_DEF(iemOp_EscF4)
15638{
15639 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15640 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
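    /* The FPU opcode register (FOP) is tracked with the low three bits of the
       escape byte in the high byte and the ModR/M byte in the low byte, so
       e.g. DC C1 yields RT_MAKE_U16(0xc1, 4) = 0x04c1. */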
15641 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15642 {
15643 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15644 {
15645 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
15646 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
15647 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
15648 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
15649 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
15650 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
15651 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
15652 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
15653 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15654 }
15655 }
15656 else
15657 {
15658 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15659 {
15660 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
15661 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
15662 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
15663 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
15664 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
15665 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
15666 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
15667 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
15668 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15669 }
15670 }
15671}
15672
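/* Memory-form example (assuming 32-bit addressing): DC 05 <disp32> has mod=0,
   reg=0, rm=5, i.e. FADD m64real from an absolute address, routed to
   iemOp_fadd_m64r by the table above. */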
15673
15674/** Opcode 0xdd !11/0.
15675 * @sa iemOp_fld_m32r */
15676FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
15677{
15678 IEMOP_MNEMONIC(fld_m64r, "fld m64r");
15679
15680 IEM_MC_BEGIN(2, 3);
15681 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15682 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15683 IEM_MC_LOCAL(RTFLOAT64U, r64Val);
15684 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15685 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);
15686
15687 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15689 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15690 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15691
15692 IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15693 IEM_MC_PREPARE_FPU_USAGE();
15694 IEM_MC_IF_FPUREG_IS_EMPTY(7)
15695 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
15696 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15697 IEM_MC_ELSE()
15698 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15699 IEM_MC_ENDIF();
15700 IEM_MC_ADVANCE_RIP();
15701
15702 IEM_MC_END();
15703 return VINF_SUCCESS;
15704}
15705
15706
/** Opcode 0xdd !11/1. */
15708FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
15709{
15710 IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
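    /* FISTTP (SSE3): converts ST0 to integer with truncation regardless of
       FCW.RC, stores it, and pops the stack. */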
15711 IEM_MC_BEGIN(3, 2);
15712 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15713 IEM_MC_LOCAL(uint16_t, u16Fsw);
15714 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15715 IEM_MC_ARG(int64_t *, pi64Dst, 1);
15716 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15717
15718 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15720 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15721 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15722
15723 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15724 IEM_MC_PREPARE_FPU_USAGE();
15725 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15726 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
15727 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
15728 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15729 IEM_MC_ELSE()
15730 IEM_MC_IF_FCW_IM()
15731 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
15732 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
15733 IEM_MC_ENDIF();
15734 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15735 IEM_MC_ENDIF();
15736 IEM_MC_ADVANCE_RIP();
15737
15738 IEM_MC_END();
15739 return VINF_SUCCESS;
15740}
15741
15742
/** Opcode 0xdd !11/2. */
15744FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
15745{
15746 IEMOP_MNEMONIC(fst_m64r, "fst m64r");
15747 IEM_MC_BEGIN(3, 2);
15748 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15749 IEM_MC_LOCAL(uint16_t, u16Fsw);
15750 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15751 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
15752 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15753
15754 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15756 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15757 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15758
15759 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15760 IEM_MC_PREPARE_FPU_USAGE();
15761 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15762 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
15763 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
15764 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15765 IEM_MC_ELSE()
15766 IEM_MC_IF_FCW_IM()
15767 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
15768 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
15769 IEM_MC_ENDIF();
15770 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15771 IEM_MC_ENDIF();
15772 IEM_MC_ADVANCE_RIP();
15773
15774 IEM_MC_END();
15775 return VINF_SUCCESS;
15776}


/** Opcode 0xdd !11/3. */
15782FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
15783{
15784 IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
15785 IEM_MC_BEGIN(3, 2);
15786 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15787 IEM_MC_LOCAL(uint16_t, u16Fsw);
15788 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15789 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
15790 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15791
15792 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15794 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15795 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15796
15797 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15798 IEM_MC_PREPARE_FPU_USAGE();
15799 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15800 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
15801 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
15802 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15803 IEM_MC_ELSE()
15804 IEM_MC_IF_FCW_IM()
15805 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
15806 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
15807 IEM_MC_ENDIF();
15808 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15809 IEM_MC_ENDIF();
15810 IEM_MC_ADVANCE_RIP();
15811
15812 IEM_MC_END();
15813 return VINF_SUCCESS;
15814}
15815
15816
/** Opcode 0xdd !11/4. */
15818FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
15819{
15820 IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
15821 IEM_MC_BEGIN(3, 0);
15822 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
15823 IEM_MC_ARG(uint8_t, iEffSeg, 1);
15824 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
15825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15827 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15828 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15829 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
15830 IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
15831 IEM_MC_END();
15832 return VINF_SUCCESS;
15833}
15834
15835
/** Opcode 0xdd !11/6. */
15837FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
15838{
15839 IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
15840 IEM_MC_BEGIN(3, 0);
15841 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
15842 IEM_MC_ARG(uint8_t, iEffSeg, 1);
15843 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
15844 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15846 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15847 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
15848 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
15849 IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
15850 IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdd !11/7. */
15856FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
15857{
15858 IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");
15859
15860 IEM_MC_BEGIN(0, 2);
15861 IEM_MC_LOCAL(uint16_t, u16Tmp);
15862 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15863
15864 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15866 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15867
15868 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
15869 IEM_MC_FETCH_FSW(u16Tmp);
15870 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
15871 IEM_MC_ADVANCE_RIP();
15872
15873/** @todo Debug / drop a hint to the verifier that things may differ
15874 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
15875 * NT4SP1. (X86_FSW_PE) */
15876 IEM_MC_END();
15877 return VINF_SUCCESS;
15878}
15879
15880
15881/** Opcode 0xdd 11/0. */
15882FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
15883{
15884 IEMOP_MNEMONIC(ffree_stN, "ffree stN");
15885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
       unmodified. */
15888
15889 IEM_MC_BEGIN(0, 0);
15890
15891 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15892 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15893
15894 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15895 IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
15896 IEM_MC_UPDATE_FPU_OPCODE_IP();
15897
15898 IEM_MC_ADVANCE_RIP();
15899 IEM_MC_END();
15900 return VINF_SUCCESS;
15901}
15902
15903
/** Opcode 0xdd 11/2. */
15905FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
15906{
15907 IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
15908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15909
15910 IEM_MC_BEGIN(0, 2);
15911 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
15912 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15913 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15914 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15915
15916 IEM_MC_PREPARE_FPU_USAGE();
15917 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15918 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
15919 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
15920 IEM_MC_ELSE()
15921 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
15922 IEM_MC_ENDIF();
15923
15924 IEM_MC_ADVANCE_RIP();
15925 IEM_MC_END();
15926 return VINF_SUCCESS;
15927}
15928
15929
/** Opcode 0xdd 11/4. */
15931FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
15932{
15933 IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
15934 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
15935}
15936
15937
/** Opcode 0xdd 11/5. */
15939FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
15940{
15941 IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
15942 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
15943}
15944
15945
15946/** Opcode 0xdd. */
15947FNIEMOP_DEF(iemOp_EscF5)
15948{
15949 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15950 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
15951 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15952 {
15953 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15954 {
15955 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of FXCH ST(i). */
15957 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
15958 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
15959 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
15960 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
15961 case 6: return IEMOP_RAISE_INVALID_OPCODE();
15962 case 7: return IEMOP_RAISE_INVALID_OPCODE();
15963 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15964 }
15965 }
15966 else
15967 {
15968 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15969 {
15970 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
15971 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
15972 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
15973 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
15974 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
15975 case 5: return IEMOP_RAISE_INVALID_OPCODE();
15976 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
15977 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
15978 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15979 }
15980 }
15981}
15982
15983
15984/** Opcode 0xde 11/0. */
15985FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
15986{
15987 IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
15988 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
15989}
15990
15991
/** Opcode 0xde 11/1. */
15993FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
15994{
15995 IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
15996 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
15997}
15998
15999
16000/** Opcode 0xde 0xd9. */
16001FNIEMOP_DEF(iemOp_fcompp)
16002{
16003 IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
16004 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
16005}
16006
16007
16008/** Opcode 0xde 11/4. */
16009FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
16010{
16011 IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
16012 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
16013}
16014
16015
16016/** Opcode 0xde 11/5. */
16017FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
16018{
16019 IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
16020 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
16021}
16022
16023
16024/** Opcode 0xde 11/6. */
16025FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
16026{
16027 IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
16028 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
16029}
16030
16031
16032/** Opcode 0xde 11/7. */
16033FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
16034{
16035 IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
16036 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
16037}
16038
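/* Note: the 0xde register forms above are all "operate then pop" variants; they
   reuse the same assembly workers as the 0xdc forms via iemOpHlpFpu_stN_st0_pop,
   which pops the register stack after storing to ST(i). */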
16039
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
16046FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
16047{
16048 IEM_MC_BEGIN(3, 3);
16049 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16050 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16051 IEM_MC_LOCAL(int16_t, i16Val2);
16052 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16053 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16054 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16055
16056 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16058
16059 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16060 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16061 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16062
16063 IEM_MC_PREPARE_FPU_USAGE();
16064 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16065 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
16066 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
16067 IEM_MC_ELSE()
16068 IEM_MC_FPU_STACK_UNDERFLOW(0);
16069 IEM_MC_ENDIF();
16070 IEM_MC_ADVANCE_RIP();
16071
16072 IEM_MC_END();
16073 return VINF_SUCCESS;
16074}
16075
16076
16077/** Opcode 0xde !11/0. */
16078FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
16079{
16080 IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
16081 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
16082}
16083
16084
16085/** Opcode 0xde !11/1. */
16086FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
16087{
16088 IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
16089 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
16090}
16091
16092
16093/** Opcode 0xde !11/2. */
16094FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
16095{
16096 IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");
16097
16098 IEM_MC_BEGIN(3, 3);
16099 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16100 IEM_MC_LOCAL(uint16_t, u16Fsw);
16101 IEM_MC_LOCAL(int16_t, i16Val2);
16102 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16103 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16104 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16105
16106 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16108
16109 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16110 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16111 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16112
16113 IEM_MC_PREPARE_FPU_USAGE();
16114 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16115 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
16116 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16117 IEM_MC_ELSE()
16118 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16119 IEM_MC_ENDIF();
16120 IEM_MC_ADVANCE_RIP();
16121
16122 IEM_MC_END();
16123 return VINF_SUCCESS;
16124}
16125
16126
16127/** Opcode 0xde !11/3. */
16128FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
16129{
16130 IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");
16131
16132 IEM_MC_BEGIN(3, 3);
16133 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16134 IEM_MC_LOCAL(uint16_t, u16Fsw);
16135 IEM_MC_LOCAL(int16_t, i16Val2);
16136 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16137 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16138 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16139
16140 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16142
16143 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16144 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16145 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16146
16147 IEM_MC_PREPARE_FPU_USAGE();
16148 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16149 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
16150 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16151 IEM_MC_ELSE()
16152 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16153 IEM_MC_ENDIF();
16154 IEM_MC_ADVANCE_RIP();
16155
16156 IEM_MC_END();
16157 return VINF_SUCCESS;
16158}
16159
16160
16161/** Opcode 0xde !11/4. */
16162FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
16163{
16164 IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
16165 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
16166}
16167
16168
16169/** Opcode 0xde !11/5. */
16170FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
16171{
16172 IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
16173 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
16174}
16175
16176
16177/** Opcode 0xde !11/6. */
16178FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
16179{
16180 IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
16181 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
16182}
16183
16184
16185/** Opcode 0xde !11/7. */
16186FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
16187{
16188 IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
16189 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
16190}
16191
16192
16193/** Opcode 0xde. */
16194FNIEMOP_DEF(iemOp_EscF6)
16195{
16196 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16197 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
16198 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16199 {
16200 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16201 {
16202 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
16203 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
16204 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
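            /* DE /3 is only defined for ModR/M 0xd9 (mod=3, reg=3, rm=1): FCOMPP. */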
16205 case 3: if (bRm == 0xd9)
16206 return FNIEMOP_CALL(iemOp_fcompp);
16207 return IEMOP_RAISE_INVALID_OPCODE();
16208 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
16209 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
16210 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
16211 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
16212 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16213 }
16214 }
16215 else
16216 {
16217 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16218 {
16219 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
16220 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
16221 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
16222 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
16223 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
16224 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
16225 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
16226 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
16227 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16228 }
16229 }
16230}
16231
16232
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
16235FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
16236{
16237 IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
16238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16239
16240 IEM_MC_BEGIN(0, 0);
16241
16242 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16243 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16244
16245 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
16246 IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
16247 IEM_MC_FPU_STACK_INC_TOP();
16248 IEM_MC_UPDATE_FPU_OPCODE_IP();
16249
16250 IEM_MC_ADVANCE_RIP();
16251 IEM_MC_END();
16252 return VINF_SUCCESS;
16253}
16254
16255
16256/** Opcode 0xdf 0xe0. */
16257FNIEMOP_DEF(iemOp_fnstsw_ax)
16258{
16259 IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
16260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16261
16262 IEM_MC_BEGIN(0, 1);
16263 IEM_MC_LOCAL(uint16_t, u16Tmp);
16264 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16265 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
16266 IEM_MC_FETCH_FSW(u16Tmp);
16267 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
16268 IEM_MC_ADVANCE_RIP();
16269 IEM_MC_END();
16270 return VINF_SUCCESS;
16271}
16272
16273
16274/** Opcode 0xdf 11/5. */
16275FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
16276{
16277 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
16278 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
16279}
16280
16281
16282/** Opcode 0xdf 11/6. */
16283FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
16284{
16285 IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
16286 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
16287}
16288
16289
16290/** Opcode 0xdf !11/0. */
16291FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
16292{
16293 IEMOP_MNEMONIC(fild_m16i, "fild m16i");
16294
16295 IEM_MC_BEGIN(2, 3);
16296 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16297 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16298 IEM_MC_LOCAL(int16_t, i16Val);
16299 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16300 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);
16301
16302 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16303 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16304
16305 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16306 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16307 IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16308
16309 IEM_MC_PREPARE_FPU_USAGE();
16310 IEM_MC_IF_FPUREG_IS_EMPTY(7)
16311 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
16312 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16313 IEM_MC_ELSE()
16314 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16315 IEM_MC_ENDIF();
16316 IEM_MC_ADVANCE_RIP();
16317
16318 IEM_MC_END();
16319 return VINF_SUCCESS;
16320}
16321
16322
16323/** Opcode 0xdf !11/1. */
16324FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
16325{
16326 IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
16327 IEM_MC_BEGIN(3, 2);
16328 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16329 IEM_MC_LOCAL(uint16_t, u16Fsw);
16330 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16331 IEM_MC_ARG(int16_t *, pi16Dst, 1);
16332 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16333
16334 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16336 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16337 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16338
16339 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16340 IEM_MC_PREPARE_FPU_USAGE();
16341 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16342 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
16343 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
16344 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16345 IEM_MC_ELSE()
16346 IEM_MC_IF_FCW_IM()
16347 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
16348 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
16349 IEM_MC_ENDIF();
16350 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16351 IEM_MC_ENDIF();
16352 IEM_MC_ADVANCE_RIP();
16353
16354 IEM_MC_END();
16355 return VINF_SUCCESS;
16356}
16357
16358
16359/** Opcode 0xdf !11/2. */
16360FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
16361{
16362 IEMOP_MNEMONIC(fist_m16i, "fist m16i");
16363 IEM_MC_BEGIN(3, 2);
16364 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16365 IEM_MC_LOCAL(uint16_t, u16Fsw);
16366 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16367 IEM_MC_ARG(int16_t *, pi16Dst, 1);
16368 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16369
16370 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16372 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16373 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16374
16375 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16376 IEM_MC_PREPARE_FPU_USAGE();
16377 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16378 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
16379 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
16380 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16381 IEM_MC_ELSE()
16382 IEM_MC_IF_FCW_IM()
16383 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
16384 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
16385 IEM_MC_ENDIF();
16386 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16387 IEM_MC_ENDIF();
16388 IEM_MC_ADVANCE_RIP();
16389
16390 IEM_MC_END();
16391 return VINF_SUCCESS;
16392}
16393
16394
16395/** Opcode 0xdf !11/3. */
16396FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
16397{
16398 IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
16399 IEM_MC_BEGIN(3, 2);
16400 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16401 IEM_MC_LOCAL(uint16_t, u16Fsw);
16402 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16403 IEM_MC_ARG(int16_t *, pi16Dst, 1);
16404 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16405
16406 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16408 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16409 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16410
16411 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16412 IEM_MC_PREPARE_FPU_USAGE();
16413 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16414 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
16415 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
16416 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16417 IEM_MC_ELSE()
16418 IEM_MC_IF_FCW_IM()
16419 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
16420 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
16421 IEM_MC_ENDIF();
16422 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16423 IEM_MC_ENDIF();
16424 IEM_MC_ADVANCE_RIP();
16425
16426 IEM_MC_END();
16427 return VINF_SUCCESS;
16428}
16429
16430
16431/** Opcode 0xdf !11/4. */
16432FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
16433
16434
16435/** Opcode 0xdf !11/5. */
16436FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
16437{
16438 IEMOP_MNEMONIC(fild_m64i, "fild m64i");
16439
16440 IEM_MC_BEGIN(2, 3);
16441 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16442 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16443 IEM_MC_LOCAL(int64_t, i64Val);
16444 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16445 IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);
16446
16447 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16449
16450 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16451 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16452 IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16453
16454 IEM_MC_PREPARE_FPU_USAGE();
16455 IEM_MC_IF_FPUREG_IS_EMPTY(7)
16456 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
16457 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16458 IEM_MC_ELSE()
16459 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16460 IEM_MC_ENDIF();
16461 IEM_MC_ADVANCE_RIP();
16462
16463 IEM_MC_END();
16464 return VINF_SUCCESS;
16465}
16466
16467
16468/** Opcode 0xdf !11/6. */
16469FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
16470
16471
16472/** Opcode 0xdf !11/7. */
16473FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
16474{
16475 IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
16476 IEM_MC_BEGIN(3, 2);
16477 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16478 IEM_MC_LOCAL(uint16_t, u16Fsw);
16479 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16480 IEM_MC_ARG(int64_t *, pi64Dst, 1);
16481 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16482
16483 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16485 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16486 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16487
16488 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16489 IEM_MC_PREPARE_FPU_USAGE();
16490 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16491 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
16492 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
16493 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16494 IEM_MC_ELSE()
16495 IEM_MC_IF_FCW_IM()
16496 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
16497 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
16498 IEM_MC_ENDIF();
16499 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16500 IEM_MC_ENDIF();
16501 IEM_MC_ADVANCE_RIP();
16502
16503 IEM_MC_END();
16504 return VINF_SUCCESS;
16505}
16506
16507
16508/** Opcode 0xdf. */
16509FNIEMOP_DEF(iemOp_EscF7)
16510{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
16512 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16513 {
16514 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16515 {
16516 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
16517 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
16518 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
16519 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
16520 case 4: if (bRm == 0xe0)
16521 return FNIEMOP_CALL(iemOp_fnstsw_ax);
16522 return IEMOP_RAISE_INVALID_OPCODE();
16523 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
16524 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
16525 case 7: return IEMOP_RAISE_INVALID_OPCODE();
16526 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16527 }
16528 }
16529 else
16530 {
16531 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16532 {
16533 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
16534 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
16535 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
16536 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
16537 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
16538 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
16539 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
16540 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
16541 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16542 }
16543 }
16544}
16545
16546
16547/** Opcode 0xe0. */
16548FNIEMOP_DEF(iemOp_loopne_Jb)
16549{
16550 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
16551 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
16552 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16553 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16554
16555 switch (pVCpu->iem.s.enmEffAddrMode)
16556 {
16557 case IEMMODE_16BIT:
16558 IEM_MC_BEGIN(0,0);
16559 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
16560 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
16561 IEM_MC_REL_JMP_S8(i8Imm);
16562 } IEM_MC_ELSE() {
16563 IEM_MC_ADVANCE_RIP();
16564 } IEM_MC_ENDIF();
16565 IEM_MC_END();
16566 return VINF_SUCCESS;
16567
16568 case IEMMODE_32BIT:
16569 IEM_MC_BEGIN(0,0);
16570 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
16571 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
16572 IEM_MC_REL_JMP_S8(i8Imm);
16573 } IEM_MC_ELSE() {
16574 IEM_MC_ADVANCE_RIP();
16575 } IEM_MC_ENDIF();
16576 IEM_MC_END();
16577 return VINF_SUCCESS;
16578
16579 case IEMMODE_64BIT:
16580 IEM_MC_BEGIN(0,0);
16581 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
16582 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
16583 IEM_MC_REL_JMP_S8(i8Imm);
16584 } IEM_MC_ELSE() {
16585 IEM_MC_ADVANCE_RIP();
16586 } IEM_MC_ENDIF();
16587 IEM_MC_END();
16588 return VINF_SUCCESS;
16589
16590 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16591 }
16592}
16593
16594
16595/** Opcode 0xe1. */
16596FNIEMOP_DEF(iemOp_loope_Jb)
16597{
16598 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
16599 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
16600 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16601 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16602
16603 switch (pVCpu->iem.s.enmEffAddrMode)
16604 {
16605 case IEMMODE_16BIT:
16606 IEM_MC_BEGIN(0,0);
16607 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
16608 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
16609 IEM_MC_REL_JMP_S8(i8Imm);
16610 } IEM_MC_ELSE() {
16611 IEM_MC_ADVANCE_RIP();
16612 } IEM_MC_ENDIF();
16613 IEM_MC_END();
16614 return VINF_SUCCESS;
16615
16616 case IEMMODE_32BIT:
16617 IEM_MC_BEGIN(0,0);
16618 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
16619 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
16620 IEM_MC_REL_JMP_S8(i8Imm);
16621 } IEM_MC_ELSE() {
16622 IEM_MC_ADVANCE_RIP();
16623 } IEM_MC_ENDIF();
16624 IEM_MC_END();
16625 return VINF_SUCCESS;
16626
16627 case IEMMODE_64BIT:
16628 IEM_MC_BEGIN(0,0);
16629 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
16630 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
16631 IEM_MC_REL_JMP_S8(i8Imm);
16632 } IEM_MC_ELSE() {
16633 IEM_MC_ADVANCE_RIP();
16634 } IEM_MC_ENDIF();
16635 IEM_MC_END();
16636 return VINF_SUCCESS;
16637
16638 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16639 }
16640}
16641
16642
16643/** Opcode 0xe2. */
16644FNIEMOP_DEF(iemOp_loop_Jb)
16645{
16646 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
16647 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
16648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16649 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16650
16651 /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
16652 * using the 32-bit operand size override. How can that be restarted? See
16653 * weird pseudo code in intel manual. */
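    /* Below: if the branch targets the LOOP instruction itself (i8Imm equals
       minus the instruction length, e.g. E2 FE), iterating would merely count
       xCX down to zero, so it is collapsed into clearing the register and
       advancing RIP. */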
16654 switch (pVCpu->iem.s.enmEffAddrMode)
16655 {
16656 case IEMMODE_16BIT:
16657 IEM_MC_BEGIN(0,0);
16658 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
16659 {
16660 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
16661 IEM_MC_IF_CX_IS_NZ() {
16662 IEM_MC_REL_JMP_S8(i8Imm);
16663 } IEM_MC_ELSE() {
16664 IEM_MC_ADVANCE_RIP();
16665 } IEM_MC_ENDIF();
16666 }
16667 else
16668 {
16669 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
16670 IEM_MC_ADVANCE_RIP();
16671 }
16672 IEM_MC_END();
16673 return VINF_SUCCESS;
16674
16675 case IEMMODE_32BIT:
16676 IEM_MC_BEGIN(0,0);
16677 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
16678 {
16679 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
16680 IEM_MC_IF_ECX_IS_NZ() {
16681 IEM_MC_REL_JMP_S8(i8Imm);
16682 } IEM_MC_ELSE() {
16683 IEM_MC_ADVANCE_RIP();
16684 } IEM_MC_ENDIF();
16685 }
16686 else
16687 {
16688 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
16689 IEM_MC_ADVANCE_RIP();
16690 }
16691 IEM_MC_END();
16692 return VINF_SUCCESS;
16693
16694 case IEMMODE_64BIT:
16695 IEM_MC_BEGIN(0,0);
16696 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
16697 {
16698 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
16699 IEM_MC_IF_RCX_IS_NZ() {
16700 IEM_MC_REL_JMP_S8(i8Imm);
16701 } IEM_MC_ELSE() {
16702 IEM_MC_ADVANCE_RIP();
16703 } IEM_MC_ENDIF();
16704 }
16705 else
16706 {
16707 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
16708 IEM_MC_ADVANCE_RIP();
16709 }
16710 IEM_MC_END();
16711 return VINF_SUCCESS;
16712
16713 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16714 }
16715}
16716
16717
16718/** Opcode 0xe3. */
16719FNIEMOP_DEF(iemOp_jecxz_Jb)
16720{
16721 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
16722 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
16723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16724 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16725
16726 switch (pVCpu->iem.s.enmEffAddrMode)
16727 {
16728 case IEMMODE_16BIT:
16729 IEM_MC_BEGIN(0,0);
16730 IEM_MC_IF_CX_IS_NZ() {
16731 IEM_MC_ADVANCE_RIP();
16732 } IEM_MC_ELSE() {
16733 IEM_MC_REL_JMP_S8(i8Imm);
16734 } IEM_MC_ENDIF();
16735 IEM_MC_END();
16736 return VINF_SUCCESS;
16737
16738 case IEMMODE_32BIT:
16739 IEM_MC_BEGIN(0,0);
16740 IEM_MC_IF_ECX_IS_NZ() {
16741 IEM_MC_ADVANCE_RIP();
16742 } IEM_MC_ELSE() {
16743 IEM_MC_REL_JMP_S8(i8Imm);
16744 } IEM_MC_ENDIF();
16745 IEM_MC_END();
16746 return VINF_SUCCESS;
16747
16748 case IEMMODE_64BIT:
16749 IEM_MC_BEGIN(0,0);
16750 IEM_MC_IF_RCX_IS_NZ() {
16751 IEM_MC_ADVANCE_RIP();
16752 } IEM_MC_ELSE() {
16753 IEM_MC_REL_JMP_S8(i8Imm);
16754 } IEM_MC_ENDIF();
16755 IEM_MC_END();
16756 return VINF_SUCCESS;
16757
16758 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16759 }
16760}
16761
16762
16763/** Opcode 0xe4 */
16764FNIEMOP_DEF(iemOp_in_AL_Ib)
16765{
16766 IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
16767 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16769 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
16770}
16771
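/* The port I/O instructions defer to iemCImpl_in / iemCImpl_out, which perform
   the IOPL and TSS I/O permission bitmap checks before accessing the port. */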
16772
16773/** Opcode 0xe5 */
16774FNIEMOP_DEF(iemOp_in_eAX_Ib)
16775{
16776 IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
16777 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16779 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
16780}
16781
16782
16783/** Opcode 0xe6 */
16784FNIEMOP_DEF(iemOp_out_Ib_AL)
16785{
16786 IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
16787 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16789 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
16790}
16791
16792
16793/** Opcode 0xe7 */
16794FNIEMOP_DEF(iemOp_out_Ib_eAX)
16795{
16796 IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
16797 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16798 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16799 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
16800}
16801
16802
16803/** Opcode 0xe8. */
16804FNIEMOP_DEF(iemOp_call_Jv)
16805{
16806 IEMOP_MNEMONIC(call_Jv, "call Jv");
16807 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16808 switch (pVCpu->iem.s.enmEffOpSize)
16809 {
16810 case IEMMODE_16BIT:
16811 {
16812 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
16813 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
16814 }
16815
16816 case IEMMODE_32BIT:
16817 {
16818 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
16819 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
16820 }
16821
16822 case IEMMODE_64BIT:
16823 {
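            /* There is no 64-bit immediate form; the 32-bit displacement is
               sign-extended to 64 bits. */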
16824 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
16825 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
16826 }
16827
16828 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16829 }
16830}
16831
16832
16833/** Opcode 0xe9. */
16834FNIEMOP_DEF(iemOp_jmp_Jv)
16835{
16836 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
16837 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16838 switch (pVCpu->iem.s.enmEffOpSize)
16839 {
16840 case IEMMODE_16BIT:
16841 {
16842 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
16843 IEM_MC_BEGIN(0, 0);
16844 IEM_MC_REL_JMP_S16(i16Imm);
16845 IEM_MC_END();
16846 return VINF_SUCCESS;
16847 }
16848
16849 case IEMMODE_64BIT:
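        /* In 64-bit mode the near JMP still encodes a 32-bit displacement
           (sign-extended), so it shares the 32-bit path. */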
16850 case IEMMODE_32BIT:
16851 {
16852 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
16853 IEM_MC_BEGIN(0, 0);
16854 IEM_MC_REL_JMP_S32(i32Imm);
16855 IEM_MC_END();
16856 return VINF_SUCCESS;
16857 }
16858
16859 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16860 }
16861}
16862
16863
16864/** Opcode 0xea. */
16865FNIEMOP_DEF(iemOp_jmp_Ap)
16866{
16867 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
16868 IEMOP_HLP_NO_64BIT();
16869
16870 /* Decode the far pointer address and pass it on to the far call C implementation. */
16871 uint32_t offSeg;
16872 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
16873 IEM_OPCODE_GET_NEXT_U32(&offSeg);
16874 else
16875 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
16876 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
16877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16878 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
16879}
16880
16881
16882/** Opcode 0xeb. */
16883FNIEMOP_DEF(iemOp_jmp_Jb)
16884{
16885 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
16886 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
16887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16888 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16889
16890 IEM_MC_BEGIN(0, 0);
16891 IEM_MC_REL_JMP_S8(i8Imm);
16892 IEM_MC_END();
16893 return VINF_SUCCESS;
16894}
16895
16896
16897/** Opcode 0xec */
16898FNIEMOP_DEF(iemOp_in_AL_DX)
16899{
16900 IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
16901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16902 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
16903}
16904
16905
16906/** Opcode 0xed */
16907FNIEMOP_DEF(iemOp_eAX_DX)
16908{
16909 IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
16910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16911 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
16912}
16913
16914
16915/** Opcode 0xee */
16916FNIEMOP_DEF(iemOp_out_DX_AL)
16917{
16918 IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
16919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16920 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
16921}
16922
16923
16924/** Opcode 0xef */
16925FNIEMOP_DEF(iemOp_out_DX_eAX)
16926{
16927 IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
16928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16929 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
16930}
16931
16932
16933/** Opcode 0xf0. */
16934FNIEMOP_DEF(iemOp_lock)
16935{
16936 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
16937 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
16938
16939 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
16940 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
16941}
16942
16943
16944/** Opcode 0xf1. */
16945FNIEMOP_DEF(iemOp_int_1)
16946{
16947 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
16948 IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
16949 /** @todo testcase! */
16950 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
16951}
16952
16953
16954/** Opcode 0xf2. */
16955FNIEMOP_DEF(iemOp_repne)
16956{
16957 /* This overrides any previous REPE prefix. */
16958 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
16959 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
16960 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
16961
16962 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
16963 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
16964}
16965
16966
16967/** Opcode 0xf3. */
16968FNIEMOP_DEF(iemOp_repe)
16969{
16970 /* This overrides any previous REPNE prefix. */
16971 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
16972 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
16973 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
16974
16975 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
16976 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
16977}
16978
16979
16980/** Opcode 0xf4. */
16981FNIEMOP_DEF(iemOp_hlt)
16982{
16983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16984 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
16985}
16986
16987
16988/** Opcode 0xf5. */
16989FNIEMOP_DEF(iemOp_cmc)
16990{
16991 IEMOP_MNEMONIC(cmc, "cmc");
16992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16993 IEM_MC_BEGIN(0, 0);
16994 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
16995 IEM_MC_ADVANCE_RIP();
16996 IEM_MC_END();
16997 return VINF_SUCCESS;
16998}
16999
17000
17001/**
17002 * Common implementation of 'inc/dec/not/neg Eb'.
17003 *
17004 * @param bRm The RM byte.
17005 * @param pImpl The instruction implementation.
17006 */
17007FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
17008{
17009 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17010 {
17011 /* register access */
17012 IEM_MC_BEGIN(2, 0);
17013 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17014 IEM_MC_ARG(uint32_t *, pEFlags, 1);
17015 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17016 IEM_MC_REF_EFLAGS(pEFlags);
17017 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
17018 IEM_MC_ADVANCE_RIP();
17019 IEM_MC_END();
17020 }
17021 else
17022 {
17023 /* memory access. */
17024 IEM_MC_BEGIN(2, 2);
17025 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17026 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17027 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17028
17029 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17030 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17031 IEM_MC_FETCH_EFLAGS(EFlags);
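        /* Use the locked worker when a LOCK prefix is present; only the memory
           form can be locked. */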
17032 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17033 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
17034 else
17035 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
17036
17037 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
17038 IEM_MC_COMMIT_EFLAGS(EFlags);
17039 IEM_MC_ADVANCE_RIP();
17040 IEM_MC_END();
17041 }
17042 return VINF_SUCCESS;
17043}
17044
17045
17046/**
17047 * Common implementation of 'inc/dec/not/neg Ev'.
17048 *
17049 * @param bRm The RM byte.
17050 * @param pImpl The instruction implementation.
17051 */
17052FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
17053{
17054 /* Registers are handled by a common worker. */
17055 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17056 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17057
17058 /* Memory we do here. */
17059 switch (pVCpu->iem.s.enmEffOpSize)
17060 {
17061 case IEMMODE_16BIT:
17062 IEM_MC_BEGIN(2, 2);
17063 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
17064 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17065 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17066
17067 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17068 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17069 IEM_MC_FETCH_EFLAGS(EFlags);
17070 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17071 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
17072 else
17073 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
17074
17075 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
17076 IEM_MC_COMMIT_EFLAGS(EFlags);
17077 IEM_MC_ADVANCE_RIP();
17078 IEM_MC_END();
17079 return VINF_SUCCESS;
17080
17081 case IEMMODE_32BIT:
17082 IEM_MC_BEGIN(2, 2);
17083 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
17084 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17085 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17086
17087 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17088 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17089 IEM_MC_FETCH_EFLAGS(EFlags);
17090 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17091 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
17092 else
17093 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
17094
17095 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
17096 IEM_MC_COMMIT_EFLAGS(EFlags);
17097 IEM_MC_ADVANCE_RIP();
17098 IEM_MC_END();
17099 return VINF_SUCCESS;
17100
17101 case IEMMODE_64BIT:
17102 IEM_MC_BEGIN(2, 2);
17103 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
17104 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17105 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17106
17107 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17108 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17109 IEM_MC_FETCH_EFLAGS(EFlags);
17110 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17111 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
17112 else
17113 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
17114
17115 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
17116 IEM_MC_COMMIT_EFLAGS(EFlags);
17117 IEM_MC_ADVANCE_RIP();
17118 IEM_MC_END();
17119 return VINF_SUCCESS;
17120
17121 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17122 }
17123}
17124
17125
17126/** Opcode 0xf6 /0. */
17127FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
17128{
17129 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
17130 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
17131
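    /* Encoding example: F6 C0 01 decodes as TEST AL,1 (mod=3 takes the register
       path below, rm=0 = AL). */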
17132 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17133 {
17134 /* register access */
17135 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17137
17138 IEM_MC_BEGIN(3, 0);
17139 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17140 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
17141 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17142 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17143 IEM_MC_REF_EFLAGS(pEFlags);
17144 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
17145 IEM_MC_ADVANCE_RIP();
17146 IEM_MC_END();
17147 }
17148 else
17149 {
17150 /* memory access. */
17151 IEM_MC_BEGIN(3, 2);
17152 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17153 IEM_MC_ARG(uint8_t, u8Src, 1);
17154 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
17155 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17156
17157 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
17158 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17159 IEM_MC_ASSIGN(u8Src, u8Imm);
17160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
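        /* TEST only reads its operands, so mapping the destination read-only is
           sufficient. */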
17161 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17162 IEM_MC_FETCH_EFLAGS(EFlags);
17163 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
17164
17165 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
17166 IEM_MC_COMMIT_EFLAGS(EFlags);
17167 IEM_MC_ADVANCE_RIP();
17168 IEM_MC_END();
17169 }
17170 return VINF_SUCCESS;
17171}
17172
17173
17174/** Opcode 0xf7 /0. */
17175FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
17176{
17177 IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
17178 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
17179
17180 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17181 {
17182 /* register access */
17183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17184 switch (pVCpu->iem.s.enmEffOpSize)
17185 {
17186 case IEMMODE_16BIT:
17187 {
17188 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
17189 IEM_MC_BEGIN(3, 0);
17190 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
17191 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
17192 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17193 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17194 IEM_MC_REF_EFLAGS(pEFlags);
17195 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
17196 IEM_MC_ADVANCE_RIP();
17197 IEM_MC_END();
17198 return VINF_SUCCESS;
17199 }
17200
17201 case IEMMODE_32BIT:
17202 {
17203 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
17204 IEM_MC_BEGIN(3, 0);
17205 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
17206 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
17207 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17208 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17209 IEM_MC_REF_EFLAGS(pEFlags);
17210 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
17211 /* No clearing the high dword here - test doesn't write back the result. */
17212 IEM_MC_ADVANCE_RIP();
17213 IEM_MC_END();
17214 return VINF_SUCCESS;
17215 }
17216
17217 case IEMMODE_64BIT:
17218 {
17219 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17220 IEM_MC_BEGIN(3, 0);
17221 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
17222 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
17223 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17224 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17225 IEM_MC_REF_EFLAGS(pEFlags);
17226 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
17227 IEM_MC_ADVANCE_RIP();
17228 IEM_MC_END();
17229 return VINF_SUCCESS;
17230 }
17231
17232 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17233 }
17234 }
17235 else
17236 {
17237 /* memory access. */
17238 switch (pVCpu->iem.s.enmEffOpSize)
17239 {
17240 case IEMMODE_16BIT:
17241 {
17242 IEM_MC_BEGIN(3, 2);
17243 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
17244 IEM_MC_ARG(uint16_t, u16Src, 1);
17245 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
17246 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17247
17248 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
17249 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
17250 IEM_MC_ASSIGN(u16Src, u16Imm);
17251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17252 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17253 IEM_MC_FETCH_EFLAGS(EFlags);
17254 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
17255
17256 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
17257 IEM_MC_COMMIT_EFLAGS(EFlags);
17258 IEM_MC_ADVANCE_RIP();
17259 IEM_MC_END();
17260 return VINF_SUCCESS;
17261 }
17262
17263 case IEMMODE_32BIT:
17264 {
17265 IEM_MC_BEGIN(3, 2);
17266 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
17267 IEM_MC_ARG(uint32_t, u32Src, 1);
17268 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
17269 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17270
17271 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
17272 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
17273 IEM_MC_ASSIGN(u32Src, u32Imm);
17274 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17275 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17276 IEM_MC_FETCH_EFLAGS(EFlags);
17277 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
17278
17279 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
17280 IEM_MC_COMMIT_EFLAGS(EFlags);
17281 IEM_MC_ADVANCE_RIP();
17282 IEM_MC_END();
17283 return VINF_SUCCESS;
17284 }
17285
17286 case IEMMODE_64BIT:
17287 {
17288 IEM_MC_BEGIN(3, 2);
17289 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
17290 IEM_MC_ARG(uint64_t, u64Src, 1);
17291 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
17292 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17293
17294 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
17295 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
17296 IEM_MC_ASSIGN(u64Src, u64Imm);
17297 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17298 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17299 IEM_MC_FETCH_EFLAGS(EFlags);
17300 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
17301
17302 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
17303 IEM_MC_COMMIT_EFLAGS(EFlags);
17304 IEM_MC_ADVANCE_RIP();
17305 IEM_MC_END();
17306 return VINF_SUCCESS;
17307 }
17308
17309 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17310 }
17311 }
17312}
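/*
 * Editor's note: in 64-bit mode there is no imm64 form of TEST Ev,Iv, which
 * is why the 64-bit cases above use IEM_OPCODE_GET_NEXT_S32_SX_U64: an imm32
 * is fetched and sign-extended to 64 bits.  A minimal sketch of that widening
 * (helper name invented for illustration):
 */
static uint64_t iemSketchSignExtendImm32(uint32_t u32Imm)
{
    /* Casting through int32_t/int64_t makes the compiler sign-extend
       instead of zero-extend. */
    return (uint64_t)(int64_t)(int32_t)u32Imm;
}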
17313
17314
17315/** Opcode 0xf6 /4, /5, /6 and /7. */
17316FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
17317{
17318 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17319 {
17320 /* register access */
17321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17322 IEM_MC_BEGIN(3, 1);
17323 IEM_MC_ARG(uint16_t *, pu16AX, 0);
17324 IEM_MC_ARG(uint8_t, u8Value, 1);
17325 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17326 IEM_MC_LOCAL(int32_t, rc);
17327
17328 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17329 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
17330 IEM_MC_REF_EFLAGS(pEFlags);
17331 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
17332 IEM_MC_IF_LOCAL_IS_Z(rc) {
17333 IEM_MC_ADVANCE_RIP();
17334 } IEM_MC_ELSE() {
17335 IEM_MC_RAISE_DIVIDE_ERROR();
17336 } IEM_MC_ENDIF();
17337
17338 IEM_MC_END();
17339 }
17340 else
17341 {
17342 /* memory access. */
17343 IEM_MC_BEGIN(3, 2);
17344 IEM_MC_ARG(uint16_t *, pu16AX, 0);
17345 IEM_MC_ARG(uint8_t, u8Value, 1);
17346 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17347 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17348 IEM_MC_LOCAL(int32_t, rc);
17349
17350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17352 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17353 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
17354 IEM_MC_REF_EFLAGS(pEFlags);
17355 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
17356 IEM_MC_IF_LOCAL_IS_Z(rc) {
17357 IEM_MC_ADVANCE_RIP();
17358 } IEM_MC_ELSE() {
17359 IEM_MC_RAISE_DIVIDE_ERROR();
17360 } IEM_MC_ENDIF();
17361
17362 IEM_MC_END();
17363 }
17364 return VINF_SUCCESS;
17365}
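/*
 * Editor's sketch: the PFNIEMAIMPLMULDIVU8 workers treat AX as one unit --
 * MUL/IMUL widen AL * r/m8 into AX, DIV/IDIV split AX into AL=quotient and
 * AH=remainder -- and return non-zero to request #DE, which is what the
 * IEM_MC_IF_LOCAL_IS_Z(rc) check above tests.  Roughly, for an unsigned
 * divide (invented name; the real workers are in assembly):
 */
static int32_t iemSketchDivU8(uint16_t *pu16AX, uint8_t u8Divisor, uint32_t *pfEFlags)
{
    if (!u8Divisor)
        return -1;                                  /* divide by zero -> #DE */
    uint16_t const uDividend = *pu16AX;
    uint16_t const uQuotient = (uint16_t)(uDividend / u8Divisor);
    if (uQuotient > 0xff)
        return -1;                                  /* quotient overflow -> #DE */
    *pu16AX = (uint16_t)(((uDividend % u8Divisor) << 8) | uQuotient); /* AH=remainder, AL=quotient */
    (void)pfEFlags;                                 /* the arithmetic flags are undefined after DIV */
    return 0;
}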
17366
17367
17368/** Opcode 0xf7 /4, /5, /6 and /7. */
17369FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
17370{
17371 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
17372
17373 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17374 {
17375 /* register access */
17377 switch (pVCpu->iem.s.enmEffOpSize)
17378 {
17379 case IEMMODE_16BIT:
17380 {
17381 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17382 IEM_MC_BEGIN(4, 1);
17383 IEM_MC_ARG(uint16_t *, pu16AX, 0);
17384 IEM_MC_ARG(uint16_t *, pu16DX, 1);
17385 IEM_MC_ARG(uint16_t, u16Value, 2);
17386 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17387 IEM_MC_LOCAL(int32_t, rc);
17388
17389 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17390 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
17391 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
17392 IEM_MC_REF_EFLAGS(pEFlags);
17393 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
17394 IEM_MC_IF_LOCAL_IS_Z(rc) {
17395 IEM_MC_ADVANCE_RIP();
17396 } IEM_MC_ELSE() {
17397 IEM_MC_RAISE_DIVIDE_ERROR();
17398 } IEM_MC_ENDIF();
17399
17400 IEM_MC_END();
17401 return VINF_SUCCESS;
17402 }
17403
17404 case IEMMODE_32BIT:
17405 {
17406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17407 IEM_MC_BEGIN(4, 1);
17408 IEM_MC_ARG(uint32_t *, pu32AX, 0);
17409 IEM_MC_ARG(uint32_t *, pu32DX, 1);
17410 IEM_MC_ARG(uint32_t, u32Value, 2);
17411 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17412 IEM_MC_LOCAL(int32_t, rc);
17413
17414 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17415 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
17416 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
17417 IEM_MC_REF_EFLAGS(pEFlags);
17418 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
17419 IEM_MC_IF_LOCAL_IS_Z(rc) {
17420 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
17421 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
17422 IEM_MC_ADVANCE_RIP();
17423 } IEM_MC_ELSE() {
17424 IEM_MC_RAISE_DIVIDE_ERROR();
17425 } IEM_MC_ENDIF();
17426
17427 IEM_MC_END();
17428 return VINF_SUCCESS;
17429 }
17430
17431 case IEMMODE_64BIT:
17432 {
17433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17434 IEM_MC_BEGIN(4, 1);
17435 IEM_MC_ARG(uint64_t *, pu64AX, 0);
17436 IEM_MC_ARG(uint64_t *, pu64DX, 1);
17437 IEM_MC_ARG(uint64_t, u64Value, 2);
17438 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17439 IEM_MC_LOCAL(int32_t, rc);
17440
17441 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17442 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
17443 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
17444 IEM_MC_REF_EFLAGS(pEFlags);
17445 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
17446 IEM_MC_IF_LOCAL_IS_Z(rc) {
17447 IEM_MC_ADVANCE_RIP();
17448 } IEM_MC_ELSE() {
17449 IEM_MC_RAISE_DIVIDE_ERROR();
17450 } IEM_MC_ENDIF();
17451
17452 IEM_MC_END();
17453 return VINF_SUCCESS;
17454 }
17455
17456 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17457 }
17458 }
17459 else
17460 {
17461 /* memory access. */
17462 switch (pVCpu->iem.s.enmEffOpSize)
17463 {
17464 case IEMMODE_16BIT:
17465 {
17466 IEM_MC_BEGIN(4, 2);
17467 IEM_MC_ARG(uint16_t *, pu16AX, 0);
17468 IEM_MC_ARG(uint16_t *, pu16DX, 1);
17469 IEM_MC_ARG(uint16_t, u16Value, 2);
17470 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17471 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17472 IEM_MC_LOCAL(int32_t, rc);
17473
17474 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17476 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17477 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
17478 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
17479 IEM_MC_REF_EFLAGS(pEFlags);
17480 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
17481 IEM_MC_IF_LOCAL_IS_Z(rc) {
17482 IEM_MC_ADVANCE_RIP();
17483 } IEM_MC_ELSE() {
17484 IEM_MC_RAISE_DIVIDE_ERROR();
17485 } IEM_MC_ENDIF();
17486
17487 IEM_MC_END();
17488 return VINF_SUCCESS;
17489 }
17490
17491 case IEMMODE_32BIT:
17492 {
17493 IEM_MC_BEGIN(4, 2);
17494 IEM_MC_ARG(uint32_t *, pu32AX, 0);
17495 IEM_MC_ARG(uint32_t *, pu32DX, 1);
17496 IEM_MC_ARG(uint32_t, u32Value, 2);
17497 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17498 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17499 IEM_MC_LOCAL(int32_t, rc);
17500
17501 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17503 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17504 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
17505 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
17506 IEM_MC_REF_EFLAGS(pEFlags);
17507 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
17508 IEM_MC_IF_LOCAL_IS_Z(rc) {
17509 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
17510 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
17511 IEM_MC_ADVANCE_RIP();
17512 } IEM_MC_ELSE() {
17513 IEM_MC_RAISE_DIVIDE_ERROR();
17514 } IEM_MC_ENDIF();
17515
17516 IEM_MC_END();
17517 return VINF_SUCCESS;
17518 }
17519
17520 case IEMMODE_64BIT:
17521 {
17522 IEM_MC_BEGIN(4, 2);
17523 IEM_MC_ARG(uint64_t *, pu64AX, 0);
17524 IEM_MC_ARG(uint64_t *, pu64DX, 1);
17525 IEM_MC_ARG(uint64_t, u64Value, 2);
17526 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17527 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17528 IEM_MC_LOCAL(int32_t, rc);
17529
17530 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17532 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17533 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
17534 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
17535 IEM_MC_REF_EFLAGS(pEFlags);
17536 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
17537 IEM_MC_IF_LOCAL_IS_Z(rc) {
17538 IEM_MC_ADVANCE_RIP();
17539 } IEM_MC_ELSE() {
17540 IEM_MC_RAISE_DIVIDE_ERROR();
17541 } IEM_MC_ENDIF();
17542
17543 IEM_MC_END();
17544 return VINF_SUCCESS;
17545 }
17546
17547 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17548 }
17549 }
17550}
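/*
 * Editor's sketch: the operand-sized workers reached through
 * PCIEMOPMULDIVSIZES use the DX:AX / EDX:EAX / RDX:RAX pair, which is why
 * both halves are passed by reference above.  A hedged 16-bit unsigned
 * divide for illustration (invented name):
 */
static int32_t iemSketchDivU16(uint16_t *pu16AX, uint16_t *pu16DX, uint16_t u16Divisor, uint32_t *pfEFlags)
{
    if (!u16Divisor)
        return -1;                                  /* divide by zero -> #DE */
    uint32_t const uDividend = ((uint32_t)*pu16DX << 16) | *pu16AX;
    uint32_t const uQuotient = uDividend / u16Divisor;
    if (uQuotient > 0xffff)
        return -1;                                  /* quotient overflow -> #DE */
    *pu16AX = (uint16_t)uQuotient;
    *pu16DX = (uint16_t)(uDividend % u16Divisor);
    (void)pfEFlags;                                 /* flags undefined after DIV */
    return 0;
}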
17551
17552/** Opcode 0xf6. */
17553FNIEMOP_DEF(iemOp_Grp3_Eb)
17554{
17555 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17556 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17557 {
17558 case 0:
17559 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
17560 case 1:
17561/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
17562 return IEMOP_RAISE_INVALID_OPCODE();
17563 case 2:
17564 IEMOP_MNEMONIC(not_Eb, "not Eb");
17565 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
17566 case 3:
17567 IEMOP_MNEMONIC(neg_Eb, "neg Eb");
17568 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
17569 case 4:
17570 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
17571 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
17572 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
17573 case 5:
17574 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
17575 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
17576 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
17577 case 6:
17578 IEMOP_MNEMONIC(div_Eb, "div Eb");
17579 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
17580 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
17581 case 7:
17582 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
17583 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
17584 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
17585 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17586 }
17587}
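/*
 * Editor's note: group opcodes such as 0xf6/0xf7 encode the operation in the
 * reg field (bits 5:3) of the ModRM byte; the switch above is driven by
 * nothing more than this extraction (illustrative helper):
 */
static unsigned iemSketchModRmRegField(uint8_t bRm)
{
    return (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK;  /* 0..7 picks /0../7 */
}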
17588
17589
17590/** Opcode 0xf7. */
17591FNIEMOP_DEF(iemOp_Grp3_Ev)
17592{
17593 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17594 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17595 {
17596 case 0:
17597 return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
17598 case 1:
17599/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
17600 return IEMOP_RAISE_INVALID_OPCODE();
17601 case 2:
17602 IEMOP_MNEMONIC(not_Ev, "not Ev");
17603 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
17604 case 3:
17605 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
17606 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
17607 case 4:
17608 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
17609 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
17610 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
17611 case 5:
17612 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
17613 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
17614 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
17615 case 6:
17616 IEMOP_MNEMONIC(div_Ev, "div Ev");
17617 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
17618 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
17619 case 7:
17620 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
17621 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
17622 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
17623 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17624 }
17625}
17626
17627
17628/** Opcode 0xf8. */
17629FNIEMOP_DEF(iemOp_clc)
17630{
17631 IEMOP_MNEMONIC(clc, "clc");
17632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17633 IEM_MC_BEGIN(0, 0);
17634 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
17635 IEM_MC_ADVANCE_RIP();
17636 IEM_MC_END();
17637 return VINF_SUCCESS;
17638}
17639
17640
17641/** Opcode 0xf9. */
17642FNIEMOP_DEF(iemOp_stc)
17643{
17644 IEMOP_MNEMONIC(stc, "stc");
17645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17646 IEM_MC_BEGIN(0, 0);
17647 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
17648 IEM_MC_ADVANCE_RIP();
17649 IEM_MC_END();
17650 return VINF_SUCCESS;
17651}
17652
17653
17654/** Opcode 0xfa. */
17655FNIEMOP_DEF(iemOp_cli)
17656{
17657 IEMOP_MNEMONIC(cli, "cli");
17658 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17659 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
17660}
17661
17662
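/** Opcode 0xfb. */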
17663FNIEMOP_DEF(iemOp_sti)
17664{
17665 IEMOP_MNEMONIC(sti, "sti");
17666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17667 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
17668}
17669
17670
17671/** Opcode 0xfc. */
17672FNIEMOP_DEF(iemOp_cld)
17673{
17674 IEMOP_MNEMONIC(cld, "cld");
17675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17676 IEM_MC_BEGIN(0, 0);
17677 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
17678 IEM_MC_ADVANCE_RIP();
17679 IEM_MC_END();
17680 return VINF_SUCCESS;
17681}
17682
17683
17684/** Opcode 0xfd. */
17685FNIEMOP_DEF(iemOp_std)
17686{
17687 IEMOP_MNEMONIC(std, "std");
17688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17689 IEM_MC_BEGIN(0, 0);
17690 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
17691 IEM_MC_ADVANCE_RIP();
17692 IEM_MC_END();
17693 return VINF_SUCCESS;
17694}
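/*
 * Editor's sketch: clc/stc/cld/std are pure EFLAGS bit edits; the two
 * micro-ops used above amount to the following in plain C (invented helper
 * names, not the IEM_MC implementations themselves):
 */
static void iemSketchClearEflBit(uint32_t *pfEFlags, uint32_t fBit)
{
    *pfEFlags &= ~fBit;                             /* cf. IEM_MC_CLEAR_EFL_BIT */
}

static void iemSketchSetEflBit(uint32_t *pfEFlags, uint32_t fBit)
{
    *pfEFlags |= fBit;                              /* cf. IEM_MC_SET_EFL_BIT */
}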
17695
17696
17697/** Opcode 0xfe. */
17698FNIEMOP_DEF(iemOp_Grp4)
17699{
17700 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17701 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17702 {
17703 case 0:
17704 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
17705 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
17706 case 1:
17707 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
17708 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
17709 default:
17710 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
17711 return IEMOP_RAISE_INVALID_OPCODE();
17712 }
17713}
17714
17715
17716/**
17717 * Opcode 0xff /2.
17718 * @param bRm The RM byte.
17719 */
17720FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
17721{
17722 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
17723 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17724
17725 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17726 {
17727 /* The new RIP is taken from a register. */
17728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17729 switch (pVCpu->iem.s.enmEffOpSize)
17730 {
17731 case IEMMODE_16BIT:
17732 IEM_MC_BEGIN(1, 0);
17733 IEM_MC_ARG(uint16_t, u16Target, 0);
17734 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17735 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
17736 IEM_MC_END()
17737 return VINF_SUCCESS;
17738
17739 case IEMMODE_32BIT:
17740 IEM_MC_BEGIN(1, 0);
17741 IEM_MC_ARG(uint32_t, u32Target, 0);
17742 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17743 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
17744 IEM_MC_END()
17745 return VINF_SUCCESS;
17746
17747 case IEMMODE_64BIT:
17748 IEM_MC_BEGIN(1, 0);
17749 IEM_MC_ARG(uint64_t, u64Target, 0);
17750 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17751 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
17752 IEM_MC_END()
17753 return VINF_SUCCESS;
17754
17755 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17756 }
17757 }
17758 else
17759 {
17760 /* The new RIP is taken from a memory location. */
17761 switch (pVCpu->iem.s.enmEffOpSize)
17762 {
17763 case IEMMODE_16BIT:
17764 IEM_MC_BEGIN(1, 1);
17765 IEM_MC_ARG(uint16_t, u16Target, 0);
17766 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17767 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17769 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17770 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
17771 IEM_MC_END()
17772 return VINF_SUCCESS;
17773
17774 case IEMMODE_32BIT:
17775 IEM_MC_BEGIN(1, 1);
17776 IEM_MC_ARG(uint32_t, u32Target, 0);
17777 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17778 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17780 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17781 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
17782 IEM_MC_END()
17783 return VINF_SUCCESS;
17784
17785 case IEMMODE_64BIT:
17786 IEM_MC_BEGIN(1, 1);
17787 IEM_MC_ARG(uint64_t, u64Target, 0);
17788 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17789 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17791 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17792 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
17793 IEM_MC_END()
17794 return VINF_SUCCESS;
17795
17796 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17797 }
17798 }
17799}
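/*
 * Editor's sketch: the iemCImpl_call_16/32/64 workers invoked above implement
 * a near call: push the address of the following instruction, then branch.
 * Against a flat qword stack this is roughly (invented helper; the real code
 * goes through segmented, paged memory and checks limits):
 */
static void iemSketchCallN64(uint64_t *puRip, uint64_t *puRsp, uint64_t *pauStack, uint64_t uTarget)
{
    *puRsp -= 8;                                    /* make room for the return address */
    pauStack[*puRsp / 8] = *puRip;                  /* push RIP of the next instruction */
    *puRip = uTarget;                               /* branch */
}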
17800
17801typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
17802
17803FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
17804{
17805 /* Registers? How?? */
17806 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
17807 { /* likely */ }
17808 else
17809 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
17810
17811 /* Far pointer loaded from memory. */
17812 switch (pVCpu->iem.s.enmEffOpSize)
17813 {
17814 case IEMMODE_16BIT:
17815 IEM_MC_BEGIN(3, 1);
17816 IEM_MC_ARG(uint16_t, u16Sel, 0);
17817 IEM_MC_ARG(uint16_t, offSeg, 1);
17818 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17819 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17820 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17822 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17823 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
17824 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17825 IEM_MC_END();
17826 return VINF_SUCCESS;
17827
17828 case IEMMODE_64BIT:
17829 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
17830 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
17831 * and call far qword [rsp] encodings. */
17832 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
17833 {
17834 IEM_MC_BEGIN(3, 1);
17835 IEM_MC_ARG(uint16_t, u16Sel, 0);
17836 IEM_MC_ARG(uint64_t, offSeg, 1);
17837 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2);
17838 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17839 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17841 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17842 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
17843 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17844 IEM_MC_END();
17845 return VINF_SUCCESS;
17846 }
17847 /* AMD falls thru. */
17848
17849 case IEMMODE_32BIT:
17850 IEM_MC_BEGIN(3, 1);
17851 IEM_MC_ARG(uint16_t, u16Sel, 0);
17852 IEM_MC_ARG(uint32_t, offSeg, 1);
17853 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
17854 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17855 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17857 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17858 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
17859 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17860 IEM_MC_END();
17861 return VINF_SUCCESS;
17862
17863 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17864 }
17865}
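/*
 * Editor's note: the m16:16, m16:32 and m16:64 far pointers decoded above are
 * stored little-endian with the offset first and the 16-bit selector right
 * after it, hence the selector fetches at displacement 2, 4 and 8.  Reading
 * an m16:32 pointer from a flat byte buffer would look like this (sketch
 * only):
 */
static void iemSketchReadFarPtr1632(uint8_t const *pb, uint16_t *puSel, uint32_t *poffSeg)
{
    uint32_t offSeg = 0;
    uint16_t uSel   = 0;
    for (unsigned i = 0; i < 4; i++)
        offSeg |= (uint32_t)pb[i] << (i * 8);       /* bytes 0..3: 32-bit offset */
    for (unsigned i = 0; i < 2; i++)
        uSel = (uint16_t)(uSel | ((uint16_t)pb[4 + i] << (i * 8))); /* bytes 4..5: selector */
    *poffSeg = offSeg;
    *puSel   = uSel;
}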
17866
17867
17868/**
17869 * Opcode 0xff /3.
17870 * @param bRm The RM byte.
17871 */
17872FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
17873{
17874 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
17875 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
17876}
17877
17878
17879/**
17880 * Opcode 0xff /4.
17881 * @param bRm The RM byte.
17882 */
17883FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
17884{
17885 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
17886 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17887
17888 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17889 {
17890 /* The new RIP is taken from a register. */
17891 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17892 switch (pVCpu->iem.s.enmEffOpSize)
17893 {
17894 case IEMMODE_16BIT:
17895 IEM_MC_BEGIN(0, 1);
17896 IEM_MC_LOCAL(uint16_t, u16Target);
17897 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17898 IEM_MC_SET_RIP_U16(u16Target);
17899 IEM_MC_END()
17900 return VINF_SUCCESS;
17901
17902 case IEMMODE_32BIT:
17903 IEM_MC_BEGIN(0, 1);
17904 IEM_MC_LOCAL(uint32_t, u32Target);
17905 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17906 IEM_MC_SET_RIP_U32(u32Target);
17907 IEM_MC_END()
17908 return VINF_SUCCESS;
17909
17910 case IEMMODE_64BIT:
17911 IEM_MC_BEGIN(0, 1);
17912 IEM_MC_LOCAL(uint64_t, u64Target);
17913 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17914 IEM_MC_SET_RIP_U64(u64Target);
17915 IEM_MC_END()
17916 return VINF_SUCCESS;
17917
17918 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17919 }
17920 }
17921 else
17922 {
17923 /* The new RIP is taken from a memory location. */
17924 switch (pVCpu->iem.s.enmEffOpSize)
17925 {
17926 case IEMMODE_16BIT:
17927 IEM_MC_BEGIN(0, 2);
17928 IEM_MC_LOCAL(uint16_t, u16Target);
17929 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17930 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17932 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17933 IEM_MC_SET_RIP_U16(u16Target);
17934 IEM_MC_END()
17935 return VINF_SUCCESS;
17936
17937 case IEMMODE_32BIT:
17938 IEM_MC_BEGIN(0, 2);
17939 IEM_MC_LOCAL(uint32_t, u32Target);
17940 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17941 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17943 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17944 IEM_MC_SET_RIP_U32(u32Target);
17945 IEM_MC_END()
17946 return VINF_SUCCESS;
17947
17948 case IEMMODE_64BIT:
17949 IEM_MC_BEGIN(0, 2);
17950 IEM_MC_LOCAL(uint64_t, u64Target);
17951 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17952 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17954 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17955 IEM_MC_SET_RIP_U64(u64Target);
17956 IEM_MC_END()
17957 return VINF_SUCCESS;
17958
17959 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17960 }
17961 }
17962}
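/*
 * Editor's sketch: a near jmp only loads RIP; the 16-bit and 32-bit forms
 * zero-extend the target, which is what IEM_MC_SET_RIP_U16/U32 amount to
 * (invented helper; the real micro-ops also respect the CS limit):
 */
static void iemSketchJmpN(uint64_t *puRip, uint64_t uTarget, unsigned cOpSizeBits)
{
    if (cOpSizeBits == 16)
        uTarget &= UINT16_MAX;                      /* IEM_MC_SET_RIP_U16 */
    else if (cOpSizeBits == 32)
        uTarget &= UINT32_MAX;                      /* IEM_MC_SET_RIP_U32 */
    *puRip = uTarget;
}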
17963
17964
17965/**
17966 * Opcode 0xff /5.
17967 * @param bRm The RM byte.
17968 */
17969FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
17970{
17971 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
17972 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
17973}
17974
17975
17976/**
17977 * Opcode 0xff /6.
17978 * @param bRm The RM byte.
17979 */
17980FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
17981{
17982 IEMOP_MNEMONIC(push_Ev, "push Ev");
17983
17984 /* Registers are handled by a common worker. */
17985 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17986 return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17987
17988 /* Memory we do here. */
17989 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17990 switch (pVCpu->iem.s.enmEffOpSize)
17991 {
17992 case IEMMODE_16BIT:
17993 IEM_MC_BEGIN(0, 2);
17994 IEM_MC_LOCAL(uint16_t, u16Src);
17995 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17996 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17998 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17999 IEM_MC_PUSH_U16(u16Src);
18000 IEM_MC_ADVANCE_RIP();
18001 IEM_MC_END();
18002 return VINF_SUCCESS;
18003
18004 case IEMMODE_32BIT:
18005 IEM_MC_BEGIN(0, 2);
18006 IEM_MC_LOCAL(uint32_t, u32Src);
18007 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18008 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18010 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18011 IEM_MC_PUSH_U32(u32Src);
18012 IEM_MC_ADVANCE_RIP();
18013 IEM_MC_END();
18014 return VINF_SUCCESS;
18015
18016 case IEMMODE_64BIT:
18017 IEM_MC_BEGIN(0, 2);
18018 IEM_MC_LOCAL(uint64_t, u64Src);
18019 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18020 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18022 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18023 IEM_MC_PUSH_U64(u64Src);
18024 IEM_MC_ADVANCE_RIP();
18025 IEM_MC_END();
18026 return VINF_SUCCESS;
18027
18028 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18029 }
18030}
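/*
 * Editor's note: like the other stack operations, push Ev defaults to a
 * 64-bit operand size in long mode (IEMOP_HLP_DEFAULT_64BIT_OP_SIZE above),
 * so the IEMMODE_32BIT case is only reachable outside 64-bit mode.  The push
 * primitive itself, against a flat byte stack, is roughly (sketch only):
 */
static void iemSketchPushU64(uint64_t *puRsp, uint8_t *pbStack, uint64_t uValue)
{
    *puRsp -= 8;                                    /* pre-decrement RSP */
    for (unsigned i = 0; i < 8; i++)                /* store little-endian */
        pbStack[*puRsp + i] = (uint8_t)(uValue >> (i * 8));
}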
18031
18032
18033/** Opcode 0xff. */
18034FNIEMOP_DEF(iemOp_Grp5)
18035{
18036 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
18037 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
18038 {
18039 case 0:
18040 IEMOP_MNEMONIC(inc_Ev, "inc Ev");
18041 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
18042 case 1:
18043 IEMOP_MNEMONIC(dec_Ev, "dec Ev");
18044 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
18045 case 2:
18046 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
18047 case 3:
18048 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
18049 case 4:
18050 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
18051 case 5:
18052 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
18053 case 6:
18054 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
18055 case 7:
18056 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
18057 return IEMOP_RAISE_INVALID_OPCODE();
18058 }
18059 AssertFailedReturn(VERR_IEM_IPE_3);
18060}
18061
18062
18063
18064const PFNIEMOP g_apfnOneByteMap[256] =
18065{
18066 /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
18067 /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
18068 /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
18069 /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
18070 /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
18071 /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
18072 /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
18073 /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
18074 /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
18075 /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
18076 /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
18077 /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
18078 /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
18079 /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
18080 /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
18081 /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
18082 /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
18083 /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
18084 /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
18085 /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
18086 /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
18087 /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
18088 /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
18089 /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
18090 /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
18091 /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
18092 /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
18093 /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
18094 /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
18095 /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
18096 /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
18097 /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
18098 /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
18099 /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
18100 /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
18101 /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
18102 /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
18103 /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
18104 /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
18105 /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
18106 /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
18107 /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
18108 /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
18109 /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
18110 /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
18111 /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
18112 /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
18113 /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
18114 /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
18115 /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
18116 /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
18117 /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
18118 /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
18119 /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
18120 /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
18121 /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
18122 /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
18123 /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
18124 /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
18125 /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
18126 /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
18127 /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
18128 /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
18129 /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
18130};
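/*
 * Editor's note: the table above is indexed by the first opcode byte.  The
 * core decoder's use of it boils down to roughly the following (sketch; the
 * actual dispatch loop lives in the IEM core, not in this file):
 */
FNIEMOP_DEF(iemOpSketch_DispatchOneByte)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);          /* fetch the opcode byte */
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);       /* dispatch through the map */
}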
18131
18132
18133/** @} */
18134
18135#ifdef _MSC_VER
18136# pragma warning(pop)
18137#endif