VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@65276

Last change on this file since 65276 was 65195, checked in by vboxsync, 8 years ago

IEM: Fixed BT implementation (memory access is R, not R/W).

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 629.6 KB
/* $Id: IEMAllInstructions.cpp.h 65195 2017-01-08 23:54:01Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 */

/*
 * Copyright (C) 2011-2016 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */

#ifdef _MSC_VER
# pragma warning(push)
# pragma warning(disable: 4702) /* Unreachable code like return in iemOp_Grp6_lldt. */
#endif


/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        if (!pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
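
/*
 * Illustrative sketch (not built, names hypothetical): how the workers in
 * this file pick the ModRM byte apart.  Only the two REX bits actually used
 * above are shown.
 */
#if 0
static void iemExampleDecodeModRm(uint8_t bRm, uint8_t uRexReg, uint8_t uRexB)
{
    uint8_t const iMod = (bRm & X86_MODRM_MOD_MASK) >> X86_MODRM_MOD_SHIFT;              /* 3 = register operand, else memory. */
    uint8_t const iReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | uRexReg; /* reg field, extended by REX.R. */
    uint8_t const iRm  = (bRm & X86_MODRM_RM_MASK) | uRexB;                              /* r/m field, extended by REX.B. */
    NOREF(iMod); NOREF(iReg); NOREF(iRm);
}
#endif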


/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *, pu8Dst, 0);
    IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1);
    IEM_MC_ARG(uint32_t *, pEFlags, 2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
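
/*
 * Worked example (illustrative, not built): in 64-bit mode the Iz immediate
 * stays 32 bits wide and is sign-extended, which is what
 * IEM_OPCODE_GET_NEXT_S32_SX_U64 models above.  The helper below is
 * hypothetical.
 */
#if 0
static uint64_t iemExampleSignExtendIz(uint32_t u32Imm)
{
    /* E.g. 48 25 FF FF FF FF ("and rax, imm32 = -1"): 0xffffffff widens to 0xffffffffffffffff. */
    return (uint64_t)(int64_t)(int32_t)u32Imm;
}
#endif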


/** Opcodes 0xf1, 0xd6. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC(Invalid, "Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Invalid with RM byte. */
FNIEMOPRM_DEF(iemOp_InvalidWithRM)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(InvalidWithRm, "InvalidWithRM");
    return IEMOP_RAISE_INVALID_OPCODE();
}



/** @name ..... opcodes.
 *
 * @{
 */

/** @} */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for opcode 0x0f 0x00 /4 (verr) and /5 (verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
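
/*
 * Worked example (illustrative, not built): 0F 00 D3 is "lldt bx".  The
 * ModRM byte 0xd3 decodes to mod = 3 (register form), reg = 2 and rm = 3,
 * so the dispatcher above picks g_apfnGroup6[2] = iemOp_Grp6_lldt with BX
 * as the selector source.  The helper name is hypothetical.
 */
#if 0
static PFNIEMOPRM iemExampleGrp6Select(uint8_t bRm)
{
    return g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK]; /* 0xd3 -> entry 2 (lldt). */
}
#endif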


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}
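
/*
 * Sketch of the MSW stuffing done by iemOp_Grp7_smsw above (illustrative,
 * not built; the helper name is hypothetical).  Pre-486 CPUs report the
 * undefined high MSW bits as set: 0xfff0 on a 286 (bits 4..15), 0xffe0 on a
 * 386 (bits 5..15, leaving CR0.ET visible); later CPUs store the low word
 * of CR0 unmodified.
 */
#if 0
static uint16_t iemExampleSmswStuffing(uint8_t uTargetCpu, uint16_t u16Cr0)
{
    if (uTargetCpu > IEMTARGETCPU_386)
        return u16Cr0;                      /* 486+: low word of CR0 as-is. */
    if (uTargetCpu == IEMTARGETCPU_386)
        return u16Cr0 | UINT16_C(0xffe0);   /* 386. */
    return u16Cr0 | UINT16_C(0xfff0);       /* 286. */
}
#endif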


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 4 bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pVCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
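
/*
 * Worked example (illustrative, not built): 0F 01 F8 is swapgs.  The ModRM
 * byte 0xf8 gives mod = 3, reg = 7 and rm = 0, so the dispatcher above
 * enters case 7 and, because mod == 3, selects on rm instead of decoding a
 * memory operand -- rm = 0 lands on iemOp_Grp7_swapgs.
 */
#if 0
static void iemExampleGrp7Decode(void)
{
    uint8_t const bRm      = 0xf8;
    uint8_t const iReg     = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK;        /* = 7 */
    bool const    fRegForm = (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT);  /* = true */
    uint8_t const iRm      = bRm & X86_MODRM_RM_MASK;                                   /* = 0 -> swapgs */
    NOREF(iReg); NOREF(fRegForm); NOREF(iRm);
}
#endif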

/** Common worker for opcode 0x0f 0x02 (lar) and 0x0f 0x03 (lsl). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}


/** Opcode 0x0f 0x10. */
FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);


/** Opcode 0x0f 0x11. */
FNIEMOP_DEF(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd)
{
    /* Quick hack. Need to restructure all of this later some time. */
    uint32_t const fRelevantPrefix = pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ);
    if (fRelevantPrefix == 0)
    {
        IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /*
             * Register, register.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_BEGIN(0, 0);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
            IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                                  ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /*
             * Memory, register.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else if (fRelevantPrefix == IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /*
             * Register, register.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, uSrc);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
            IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /*
             * Memory, register.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        IEMOP_BITCH_ABOUT_STUB();
        return VERR_IEM_INSTR_NOT_IMPLEMENTED;
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq); //NEXT


/** Opcode 0x0f 0x13. */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq)
{
    /* Quick hack. Need to restructure all of this later some time. */
    if (pVCpu->iem.s.fPrefixes == IEM_OP_PRF_SIZE_OP)
    {
        IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
#if 0
            /*
             * Register, register.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
            IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
#else
            return IEMOP_RAISE_INVALID_OPCODE();
#endif
        }
        else
        {
            /*
             * Memory, register.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        return VINF_SUCCESS;
    }

    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/** Opcode 0x0f 0x14. */
FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
/** Opcode 0x0f 0x15. */
FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
/** Opcode 0x0f 0x16. */
FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x17. */
FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq); //NEXT


/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}
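
/*
 * Sketch of the alternative CR8 encoding handled above (illustrative, not
 * built; the helper name is hypothetical).  On CPUs reporting
 * fMovCr8In32Bit, a LOCK prefix turns 0F 20 /0 into a CR8 access, so
 * F0 0F 20 C0 reads CR8 into EAX instead of faulting.
 */
#if 0
static uint8_t iemExampleCrRegFromLock(uint8_t bRm, bool fLockPrefix)
{
    uint8_t iCrReg = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK; /* reg field; 0 for ModRM 0xc0. */
    if (fLockPrefix)
        iCrReg |= 8;                                                     /* LOCK -> CR8. */
    return iCrReg;
}
#endif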
1790
1791
1792/** Opcode 0x0f 0x21. */
1793FNIEMOP_DEF(iemOp_mov_Rd_Dd)
1794{
1795 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
1796 IEMOP_HLP_MIN_386();
1797 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1798 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1799 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1800 return IEMOP_RAISE_INVALID_OPCODE();
1801 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
1802 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
1803 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
1804}
1805
1806
1807/** Opcode 0x0f 0x22. */
1808FNIEMOP_DEF(iemOp_mov_Cd_Rd)
1809{
1810    /* mod is ignored, as are operand size overrides. */
1811 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
1812 IEMOP_HLP_MIN_386();
1813 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1814 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1815 else
1816 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1817
1818 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1819 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1820 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1821 {
1822 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1823 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1824 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1825 iCrReg |= 8;
1826 }
1827 switch (iCrReg)
1828 {
1829 case 0: case 2: case 3: case 4: case 8:
1830 break;
1831 default:
1832 return IEMOP_RAISE_INVALID_OPCODE();
1833 }
1834 IEMOP_HLP_DONE_DECODING();
1835
1836 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1837}
1838
1839
1840/** Opcode 0x0f 0x23. */
1841FNIEMOP_DEF(iemOp_mov_Dd_Rd)
1842{
1843 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
1844 IEMOP_HLP_MIN_386();
1845 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1847 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1848 return IEMOP_RAISE_INVALID_OPCODE();
1849 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
1850 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
1851 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1852}
1853
1854
1855/** Opcode 0x0f 0x24. */
1856FNIEMOP_DEF(iemOp_mov_Rd_Td)
1857{
1858 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
1859 /** @todo works on 386 and 486. */
1860 /* The RM byte is not considered, see testcase. */
1861 return IEMOP_RAISE_INVALID_OPCODE();
1862}
1863
1864
1865/** Opcode 0x0f 0x26. */
1866FNIEMOP_DEF(iemOp_mov_Td_Rd)
1867{
1868 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
1869 /** @todo works on 386 and 486. */
1870 /* The RM byte is not considered, see testcase. */
1871 return IEMOP_RAISE_INVALID_OPCODE();
1872}
1873
1874
1875/** Opcode 0x0f 0x28. */
1876FNIEMOP_DEF(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd)
1877{
1878 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1879 IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
1880 else
1881 IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
1882 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1883 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1884 {
1885 /*
1886 * Register, register.
1887 */
1888 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1889 IEM_MC_BEGIN(0, 0);
1890 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1891 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1892 else
1893 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1894 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1895 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1896 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1897 IEM_MC_ADVANCE_RIP();
1898 IEM_MC_END();
1899 }
1900 else
1901 {
1902 /*
1903 * Register, memory.
1904 */
1905 IEM_MC_BEGIN(0, 2);
1906 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1907 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1908
1909 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1910 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
1911 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1912 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1913 else
1914 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1915 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1916
1917 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1918 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1919
1920 IEM_MC_ADVANCE_RIP();
1921 IEM_MC_END();
1922 }
1923 return VINF_SUCCESS;
1924}
1925
1926
1927/** Opcode 0x0f 0x29. */
1928FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
1929{
1930 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1931 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
1932 else
1933 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
1934 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1935 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1936 {
1937 /*
1938 * Register, register.
1939 */
1940 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1941 IEM_MC_BEGIN(0, 0);
1942 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1943 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1944 else
1945 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1946 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1947 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1948 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1949 IEM_MC_ADVANCE_RIP();
1950 IEM_MC_END();
1951 }
1952 else
1953 {
1954 /*
1955 * Memory, register.
1956 */
1957 IEM_MC_BEGIN(0, 2);
1958 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1959 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1960
1961 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1962 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
1963 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1964 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1965 else
1966 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1967 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1968
1969 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1970 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1971
1972 IEM_MC_ADVANCE_RIP();
1973 IEM_MC_END();
1974 }
1975 return VINF_SUCCESS;
1976}
1977
1978
1979/** Opcode 0x0f 0x2a. */
1980FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey); //NEXT
1981
1982
1983/** Opcode 0x0f 0x2b. */
1984FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
1985{
1986 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1987 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
1988 else
1989        IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mpd,Vpd");
1990 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1991 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1992 {
1993 /*
1994 * memory, register.
1995 */
1996 IEM_MC_BEGIN(0, 2);
1997 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1998 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1999
2000 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2001 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
2002 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2003 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2004 else
2005 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2006 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2007
2008 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2009 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2010
2011 IEM_MC_ADVANCE_RIP();
2012 IEM_MC_END();
2013 }
2014 /* The register, register encoding is invalid. */
2015 else
2016 return IEMOP_RAISE_INVALID_OPCODE();
2017 return VINF_SUCCESS;
2018}
2019
2020
2021/** Opcode 0x0f 0x2c. */
2022FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd); //NEXT
2023/** Opcode 0x0f 0x2d. */
2024FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
2025/** Opcode 0x0f 0x2e. */
2026FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd); //NEXT
2027/** Opcode 0x0f 0x2f. */
2028FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
2029
2030
2031/** Opcode 0x0f 0x30. */
2032FNIEMOP_DEF(iemOp_wrmsr)
2033{
2034 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2035 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2036 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2037}
2038
2039
2040/** Opcode 0x0f 0x31. */
2041FNIEMOP_DEF(iemOp_rdtsc)
2042{
2043 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2045 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2046}
2047
2048
2049/** Opcode 0x0f 0x32. */
2050FNIEMOP_DEF(iemOp_rdmsr)
2051{
2052 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2054 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2055}
2056
2057
2058/** Opcode 0x0f 0x33. */
2059FNIEMOP_STUB(iemOp_rdpmc);
2060/** Opcode 0x0f 0x34. */
2061FNIEMOP_STUB(iemOp_sysenter);
2062/** Opcode 0x0f 0x35. */
2063FNIEMOP_STUB(iemOp_sysexit);
2064/** Opcode 0x0f 0x37. */
2065FNIEMOP_STUB(iemOp_getsec);
2066/** Opcode 0x0f 0x38. */
2067FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
2068/** Opcode 0x0f 0x3a. */
2069FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
2070
2071
2072/**
2073 * Implements a conditional move.
2074 *
2075 * Wish there was an obvious way to do this where we could share and reduce
2076 * code bloat.
2077 *
2078 * @param a_Cnd The conditional "microcode" operation.
2079 */
2080#define CMOV_X(a_Cnd) \
2081 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2082 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2083 { \
2084 switch (pVCpu->iem.s.enmEffOpSize) \
2085 { \
2086 case IEMMODE_16BIT: \
2087 IEM_MC_BEGIN(0, 1); \
2088 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2089 a_Cnd { \
2090 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2091 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2092 } IEM_MC_ENDIF(); \
2093 IEM_MC_ADVANCE_RIP(); \
2094 IEM_MC_END(); \
2095 return VINF_SUCCESS; \
2096 \
2097 case IEMMODE_32BIT: \
2098 IEM_MC_BEGIN(0, 1); \
2099 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2100 a_Cnd { \
2101 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2102 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2103 } IEM_MC_ELSE() { \
2104 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2105 } IEM_MC_ENDIF(); \
2106 IEM_MC_ADVANCE_RIP(); \
2107 IEM_MC_END(); \
2108 return VINF_SUCCESS; \
2109 \
2110 case IEMMODE_64BIT: \
2111 IEM_MC_BEGIN(0, 1); \
2112 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2113 a_Cnd { \
2114 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2115 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2116 } IEM_MC_ENDIF(); \
2117 IEM_MC_ADVANCE_RIP(); \
2118 IEM_MC_END(); \
2119 return VINF_SUCCESS; \
2120 \
2121 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2122 } \
2123 } \
2124 else \
2125 { \
2126 switch (pVCpu->iem.s.enmEffOpSize) \
2127 { \
2128 case IEMMODE_16BIT: \
2129 IEM_MC_BEGIN(0, 2); \
2130 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2131 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2133 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2134 a_Cnd { \
2135 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2136 } IEM_MC_ENDIF(); \
2137 IEM_MC_ADVANCE_RIP(); \
2138 IEM_MC_END(); \
2139 return VINF_SUCCESS; \
2140 \
2141 case IEMMODE_32BIT: \
2142 IEM_MC_BEGIN(0, 2); \
2143 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2144 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2145 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2146 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2147 a_Cnd { \
2148 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2149 } IEM_MC_ELSE() { \
2150 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2151 } IEM_MC_ENDIF(); \
2152 IEM_MC_ADVANCE_RIP(); \
2153 IEM_MC_END(); \
2154 return VINF_SUCCESS; \
2155 \
2156 case IEMMODE_64BIT: \
2157 IEM_MC_BEGIN(0, 2); \
2158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2159 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2161 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2162 a_Cnd { \
2163 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2164 } IEM_MC_ENDIF(); \
2165 IEM_MC_ADVANCE_RIP(); \
2166 IEM_MC_END(); \
2167 return VINF_SUCCESS; \
2168 \
2169 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2170 } \
2171 } do {} while (0)
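
/*
 * Editor's note (illustration, not from the original sources): the
 * IEM_MC_ELSE() branches in the 32-bit cases above implement the x86 rule
 * that a 32-bit CMOVcc in 64-bit mode zero-extends the destination even
 * when the condition is false:
 *      cmovne eax, ebx     ; rax[63:32] is cleared regardless of ZF
 * The 16-bit and 64-bit cases need no such else branch.
 */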
2172
2173
2174
2175/** Opcode 0x0f 0x40. */
2176FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2177{
2178 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2179 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2180}
2181
2182
2183/** Opcode 0x0f 0x41. */
2184FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2185{
2186 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2187 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2188}
2189
2190
2191/** Opcode 0x0f 0x42. */
2192FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2193{
2194 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2195 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2196}
2197
2198
2199/** Opcode 0x0f 0x43. */
2200FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2201{
2202 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2203 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2204}
2205
2206
2207/** Opcode 0x0f 0x44. */
2208FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2209{
2210 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2211 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2212}
2213
2214
2215/** Opcode 0x0f 0x45. */
2216FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2217{
2218 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2219 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2220}
2221
2222
2223/** Opcode 0x0f 0x46. */
2224FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2225{
2226 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2227 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2228}
2229
2230
2231/** Opcode 0x0f 0x47. */
2232FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2233{
2234 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2235 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2236}
2237
2238
2239/** Opcode 0x0f 0x48. */
2240FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2241{
2242 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2243 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2244}
2245
2246
2247/** Opcode 0x0f 0x49. */
2248FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2249{
2250 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2251 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2252}
2253
2254
2255/** Opcode 0x0f 0x4a. */
2256FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2257{
2258 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2259 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2260}
2261
2262
2263/** Opcode 0x0f 0x4b. */
2264FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2265{
2266 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2267 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2268}
2269
2270
2271/** Opcode 0x0f 0x4c. */
2272FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2273{
2274 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2275 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2276}
2277
2278
2279/** Opcode 0x0f 0x4d. */
2280FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2281{
2282 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2283 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2284}
2285
2286
2287/** Opcode 0x0f 0x4e. */
2288FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2289{
2290 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2291 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2292}
2293
2294
2295/** Opcode 0x0f 0x4f. */
2296FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2297{
2298 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2299 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2300}
2301
2302#undef CMOV_X
2303
2304/** Opcode 0x0f 0x50. */
2305FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
2306/** Opcode 0x0f 0x51. */
2307FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
2308/** Opcode 0x0f 0x52. */
2309FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
2310/** Opcode 0x0f 0x53. */
2311FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
2312/** Opcode 0x0f 0x54. */
2313FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
2314/** Opcode 0x0f 0x55. */
2315FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
2316/** Opcode 0x0f 0x56. */
2317FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
2318/** Opcode 0x0f 0x57. */
2319FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
2320/** Opcode 0x0f 0x58. */
2321FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd); //NEXT
2322/** Opcode 0x0f 0x59. */
2323FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);//NEXT
2324/** Opcode 0x0f 0x5a. */
2325FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
2326/** Opcode 0x0f 0x5b. */
2327FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvttps2dq_Vdq_Wps);
2328/** Opcode 0x0f 0x5c. */
2329FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
2330/** Opcode 0x0f 0x5d. */
2331FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
2332/** Opcode 0x0f 0x5e. */
2333FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
2334/** Opcode 0x0f 0x5f. */
2335FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
2336
2337
2338/**
2339 * Common worker for SSE2 and MMX instructions on the forms:
2340 * pxxxx xmm1, xmm2/mem128
2341 * pxxxx mm1, mm2/mem32
2342 *
2343 * The 2nd operand is the first half of a register, which in the memory case
2344 * means a 32-bit memory access for MMX and a 128-bit aligned 64-bit or 128-bit
2345 * memory access for SSE (see the sketch after this function).
2346 *
2347 * Exceptions type 4.
2348 */
2349FNIEMOP_DEF_1(iemOpCommonMmxSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2350{
2351 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2352 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2353 {
2354 case IEM_OP_PRF_SIZE_OP: /* SSE */
2355 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2356 {
2357 /*
2358 * Register, register.
2359 */
2360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2361 IEM_MC_BEGIN(2, 0);
2362 IEM_MC_ARG(uint128_t *, pDst, 0);
2363 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2364 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2365 IEM_MC_PREPARE_SSE_USAGE();
2366 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2367 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2368 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2369 IEM_MC_ADVANCE_RIP();
2370 IEM_MC_END();
2371 }
2372 else
2373 {
2374 /*
2375 * Register, memory.
2376 */
2377 IEM_MC_BEGIN(2, 2);
2378 IEM_MC_ARG(uint128_t *, pDst, 0);
2379 IEM_MC_LOCAL(uint64_t, uSrc);
2380 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2381 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2382
2383 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2385 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2386 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2387
2388 IEM_MC_PREPARE_SSE_USAGE();
2389 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2390 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2391
2392 IEM_MC_ADVANCE_RIP();
2393 IEM_MC_END();
2394 }
2395 return VINF_SUCCESS;
2396
2397 case 0: /* MMX */
2398 if (!pImpl->pfnU64)
2399 return IEMOP_RAISE_INVALID_OPCODE();
2400 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2401 {
2402 /*
2403 * Register, register.
2404 */
2405 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2406 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2408 IEM_MC_BEGIN(2, 0);
2409 IEM_MC_ARG(uint64_t *, pDst, 0);
2410 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2411 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2412 IEM_MC_PREPARE_FPU_USAGE();
2413 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2414 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2415 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2416 IEM_MC_ADVANCE_RIP();
2417 IEM_MC_END();
2418 }
2419 else
2420 {
2421 /*
2422 * Register, memory.
2423 */
2424 IEM_MC_BEGIN(2, 2);
2425 IEM_MC_ARG(uint64_t *, pDst, 0);
2426 IEM_MC_LOCAL(uint32_t, uSrc);
2427 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2428 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2429
2430 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2432 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2433 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2434
2435 IEM_MC_PREPARE_FPU_USAGE();
2436 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2437 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2438
2439 IEM_MC_ADVANCE_RIP();
2440 IEM_MC_END();
2441 }
2442 return VINF_SUCCESS;
2443
2444 default:
2445 return IEMOP_RAISE_INVALID_OPCODE();
2446 }
2447}
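
/*
 * Editor's sketch (hypothetical reference code, not the VBox assembly
 * implementation): what a "low halves to full" operation such as punpcklbw
 * computes once the worker above has fetched its operands.
 */
#if 0 /* illustration only */
static void punpcklbw_ref(uint8_t *pbDst, uint8_t const *pbSrc, unsigned cbHalf)
{
    uint8_t abTmp[16]; /* cbHalf is 4 for the MMX form, 8 for the SSE form */
    for (unsigned i = 0; i < cbHalf; i++) /* interleave the low halves, dst first */
    {
        abTmp[i * 2]     = pbDst[i];
        abTmp[i * 2 + 1] = pbSrc[i];
    }
    for (unsigned i = 0; i < cbHalf * 2; i++)
        pbDst[i] = abTmp[i];
}
#endif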
2448
2449
2450/** Opcode 0x0f 0x60. */
2451FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq)
2452{
2453 IEMOP_MNEMONIC(punpcklbw, "punpcklbw");
2454 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2455}
2456
2457
2458/** Opcode 0x0f 0x61. */
2459FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq)
2460{
2461 IEMOP_MNEMONIC(punpcklwd, "punpcklwd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
2462 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2463}
2464
2465
2466/** Opcode 0x0f 0x62. */
2467FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq)
2468{
2469 IEMOP_MNEMONIC(punpckldq, "punpckldq");
2470 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2471}
2472
2473
2474/** Opcode 0x0f 0x63. */
2475FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
2476/** Opcode 0x0f 0x64. */
2477FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
2478/** Opcode 0x0f 0x65. */
2479FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
2480/** Opcode 0x0f 0x66. */
2481FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
2482/** Opcode 0x0f 0x67. */
2483FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
2484
2485
2486/**
2487 * Common worker for SSE2 and MMX instructions on the forms:
2488 * pxxxx xmm1, xmm2/mem128
2489 * pxxxx mm1, mm2/mem64
2490 *
2491 * The 2nd operand is the second half of a register, which in the memory case
2492 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2493 * where it may read the full 128 bits or only the upper 64 bits (see note below).
2494 *
2495 * Exceptions type 4.
2496 */
2497FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2498{
2499 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2500 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2501 {
2502 case IEM_OP_PRF_SIZE_OP: /* SSE */
2503 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2504 {
2505 /*
2506 * Register, register.
2507 */
2508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2509 IEM_MC_BEGIN(2, 0);
2510 IEM_MC_ARG(uint128_t *, pDst, 0);
2511 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2512 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2513 IEM_MC_PREPARE_SSE_USAGE();
2514 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2515 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2516 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2517 IEM_MC_ADVANCE_RIP();
2518 IEM_MC_END();
2519 }
2520 else
2521 {
2522 /*
2523 * Register, memory.
2524 */
2525 IEM_MC_BEGIN(2, 2);
2526 IEM_MC_ARG(uint128_t *, pDst, 0);
2527 IEM_MC_LOCAL(uint128_t, uSrc);
2528 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2529 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2530
2531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2533 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2534                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2535
2536 IEM_MC_PREPARE_SSE_USAGE();
2537 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2538 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2539
2540 IEM_MC_ADVANCE_RIP();
2541 IEM_MC_END();
2542 }
2543 return VINF_SUCCESS;
2544
2545 case 0: /* MMX */
2546 if (!pImpl->pfnU64)
2547 return IEMOP_RAISE_INVALID_OPCODE();
2548 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2549 {
2550 /*
2551 * Register, register.
2552 */
2553 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2554 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2556 IEM_MC_BEGIN(2, 0);
2557 IEM_MC_ARG(uint64_t *, pDst, 0);
2558 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2559 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2560 IEM_MC_PREPARE_FPU_USAGE();
2561 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2562 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2563 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2564 IEM_MC_ADVANCE_RIP();
2565 IEM_MC_END();
2566 }
2567 else
2568 {
2569 /*
2570 * Register, memory.
2571 */
2572 IEM_MC_BEGIN(2, 2);
2573 IEM_MC_ARG(uint64_t *, pDst, 0);
2574 IEM_MC_LOCAL(uint64_t, uSrc);
2575 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2576 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2577
2578 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2580 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2581 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2582
2583 IEM_MC_PREPARE_FPU_USAGE();
2584 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2585 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2586
2587 IEM_MC_ADVANCE_RIP();
2588 IEM_MC_END();
2589 }
2590 return VINF_SUCCESS;
2591
2592 default:
2593 return IEMOP_RAISE_INVALID_OPCODE();
2594 }
2595}
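
/*
 * Editor's note (illustration): e.g. with mm1={a7..a0} and mm2={b7..b0},
 * punpckhbw mm1,mm2 yields mm1={b7 a7 b6 a6 b5 a5 b4 a4} - only the high
 * halves read via the worker above contribute to the result.
 */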
2596
2597
2598/** Opcode 0x0f 0x68. */
2599FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
2600{
2601 IEMOP_MNEMONIC(punpckhbw, "punpckhbw");
2602 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2603}
2604
2605
2606/** Opcode 0x0f 0x69. */
2607FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
2608{
2609 IEMOP_MNEMONIC(punpckhwd, "punpckhwd");
2610 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2611}
2612
2613
2614/** Opcode 0x0f 0x6a. */
2615FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
2616{
2617 IEMOP_MNEMONIC(punpckhdq, "punpckhdq");
2618 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2619}
2620
2621/** Opcode 0x0f 0x6b. */
2622FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdw_Vdq_Wdq);
2623
2624
2625/** Opcode 0x0f 0x6c. */
2626FNIEMOP_DEF(iemOp_punpcklqdq_Vdq_Wdq)
2627{
2628 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq");
2629 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2630}
2631
2632
2633/** Opcode 0x0f 0x6d. */
2634FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
2635{
2636 IEMOP_MNEMONIC(punpckhqdq, "punpckhqdq");
2637 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
2638}
2639
2640
2641/** Opcode 0x0f 0x6e. */
2642FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
2643{
2644 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2645 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2646 {
2647 case IEM_OP_PRF_SIZE_OP: /* SSE */
2648 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2649 IEMOP_MNEMONIC(movdq_Wq_Eq, "movq Wq,Eq");
2650 else
2651 IEMOP_MNEMONIC(movdq_Wd_Ed, "movd Wd,Ed");
2652 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2653 {
2654 /* XMM, greg*/
2655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2656 IEM_MC_BEGIN(0, 1);
2657 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2658 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2659 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2660 {
2661 IEM_MC_LOCAL(uint64_t, u64Tmp);
2662 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2663 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2664 }
2665 else
2666 {
2667 IEM_MC_LOCAL(uint32_t, u32Tmp);
2668 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2669 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2670 }
2671 IEM_MC_ADVANCE_RIP();
2672 IEM_MC_END();
2673 }
2674 else
2675 {
2676 /* XMM, [mem] */
2677 IEM_MC_BEGIN(0, 2);
2678 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2679 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
2680                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2682 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2683 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2684 {
2685 IEM_MC_LOCAL(uint64_t, u64Tmp);
2686 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2687 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2688 }
2689 else
2690 {
2691 IEM_MC_LOCAL(uint32_t, u32Tmp);
2692 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2693 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2694 }
2695 IEM_MC_ADVANCE_RIP();
2696 IEM_MC_END();
2697 }
2698 return VINF_SUCCESS;
2699
2700 case 0: /* MMX */
2701 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2702 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
2703 else
2704 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
2705 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2706 {
2707 /* MMX, greg */
2708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2709 IEM_MC_BEGIN(0, 1);
2710 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2711 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2712 IEM_MC_LOCAL(uint64_t, u64Tmp);
2713 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2714 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2715 else
2716 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2717 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2718 IEM_MC_ADVANCE_RIP();
2719 IEM_MC_END();
2720 }
2721 else
2722 {
2723 /* MMX, [mem] */
2724 IEM_MC_BEGIN(0, 2);
2725 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2726 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2727                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2729 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2730 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2731 {
2732 IEM_MC_LOCAL(uint64_t, u64Tmp);
2733 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2734 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2735 }
2736 else
2737 {
2738 IEM_MC_LOCAL(uint32_t, u32Tmp);
2739 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2740 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
2741 }
2742 IEM_MC_ADVANCE_RIP();
2743 IEM_MC_END();
2744 }
2745 return VINF_SUCCESS;
2746
2747 default:
2748 return IEMOP_RAISE_INVALID_OPCODE();
2749 }
2750}
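
/*
 * Editor's note (illustration, not from the sources): the prefix/REX.W
 * combinations decoded above, as guest assembly:
 *      0f 6e c8            movd mm1, eax
 *      66 0f 6e c8         movd xmm1, eax   ; zero-extended to 128 bits
 *      66 48 0f 6e c8      movq xmm1, rax   ; REX.W selects the 64-bit form
 */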
2751
2752
2753/** Opcode 0x0f 0x6f. */
2754FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
2755{
2756 bool fAligned = false;
2757 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2758 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2759 {
2760 case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
2761            fAligned = true;
2761            /* fall thru */
2762 case IEM_OP_PRF_REPZ: /* SSE unaligned */
2763 if (fAligned)
2764 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
2765 else
2766 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
2767 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2768 {
2769 /*
2770 * Register, register.
2771 */
2772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2773 IEM_MC_BEGIN(0, 0);
2774 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2775 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2776 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2777 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2778 IEM_MC_ADVANCE_RIP();
2779 IEM_MC_END();
2780 }
2781 else
2782 {
2783 /*
2784 * Register, memory.
2785 */
2786 IEM_MC_BEGIN(0, 2);
2787 IEM_MC_LOCAL(uint128_t, u128Tmp);
2788 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2789
2790 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2791 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2792 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2793 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2794 if (fAligned)
2795 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2796 else
2797 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2798 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2799
2800 IEM_MC_ADVANCE_RIP();
2801 IEM_MC_END();
2802 }
2803 return VINF_SUCCESS;
2804
2805 case 0: /* MMX */
2806 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
2807 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2808 {
2809 /*
2810 * Register, register.
2811 */
2812 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2813 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2815 IEM_MC_BEGIN(0, 1);
2816 IEM_MC_LOCAL(uint64_t, u64Tmp);
2817 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2818 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2819 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
2820 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2821 IEM_MC_ADVANCE_RIP();
2822 IEM_MC_END();
2823 }
2824 else
2825 {
2826 /*
2827 * Register, memory.
2828 */
2829 IEM_MC_BEGIN(0, 2);
2830 IEM_MC_LOCAL(uint64_t, u64Tmp);
2831 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2832
2833 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2835 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2836 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2837 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2838 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2839
2840 IEM_MC_ADVANCE_RIP();
2841 IEM_MC_END();
2842 }
2843 return VINF_SUCCESS;
2844
2845 default:
2846 return IEMOP_RAISE_INVALID_OPCODE();
2847 }
2848}
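
/*
 * Editor's note (illustration): prefix selection in the worker above:
 *      <none>  movq   mm, mm/m64       (MMX)
 *      66      movdqa xmm, xmm/m128    (SSE2, #GP on unaligned m128)
 *      f3      movdqu xmm, xmm/m128    (SSE2, no alignment restriction)
 */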
2849
2850
2851/** Opcode 0x0f 0x70. The immediate here is evil! */
2852FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflw_Vdq_Wdq_Ib)
2853{
2854 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2855 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2856 {
2857 case IEM_OP_PRF_SIZE_OP: /* SSE */
2858 case IEM_OP_PRF_REPNZ: /* SSE */
2859 case IEM_OP_PRF_REPZ: /* SSE */
2860 {
2861 PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
2862 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2863 {
2864 case IEM_OP_PRF_SIZE_OP:
2865 IEMOP_MNEMONIC(pshufd_Vdq_Wdq, "pshufd Vdq,Wdq,Ib");
2866 pfnAImpl = iemAImpl_pshufd;
2867 break;
2868 case IEM_OP_PRF_REPNZ:
2869 IEMOP_MNEMONIC(pshuflw_Vdq_Wdq, "pshuflw Vdq,Wdq,Ib");
2870 pfnAImpl = iemAImpl_pshuflw;
2871 break;
2872 case IEM_OP_PRF_REPZ:
2873 IEMOP_MNEMONIC(pshufhw_Vdq_Wdq, "pshufhw Vdq,Wdq,Ib");
2874 pfnAImpl = iemAImpl_pshufhw;
2875 break;
2876 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2877 }
2878 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2879 {
2880 /*
2881 * Register, register.
2882 */
2883 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2885
2886 IEM_MC_BEGIN(3, 0);
2887 IEM_MC_ARG(uint128_t *, pDst, 0);
2888 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2889 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2890 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2891 IEM_MC_PREPARE_SSE_USAGE();
2892 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2893 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2894 IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
2895 IEM_MC_ADVANCE_RIP();
2896 IEM_MC_END();
2897 }
2898 else
2899 {
2900 /*
2901 * Register, memory.
2902 */
2903 IEM_MC_BEGIN(3, 2);
2904 IEM_MC_ARG(uint128_t *, pDst, 0);
2905 IEM_MC_LOCAL(uint128_t, uSrc);
2906 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2907 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2908
2909                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2910 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2911 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2913 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2914
2915 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2916 IEM_MC_PREPARE_SSE_USAGE();
2917 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2918 IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
2919
2920 IEM_MC_ADVANCE_RIP();
2921 IEM_MC_END();
2922 }
2923 return VINF_SUCCESS;
2924 }
2925
2926 case 0: /* MMX Extension */
2927 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
2928 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2929 {
2930 /*
2931 * Register, register.
2932 */
2933 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2935
2936 IEM_MC_BEGIN(3, 0);
2937 IEM_MC_ARG(uint64_t *, pDst, 0);
2938 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2939 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2940 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2941 IEM_MC_PREPARE_FPU_USAGE();
2942 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2943 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2944 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2945 IEM_MC_ADVANCE_RIP();
2946 IEM_MC_END();
2947 }
2948 else
2949 {
2950 /*
2951 * Register, memory.
2952 */
2953 IEM_MC_BEGIN(3, 2);
2954 IEM_MC_ARG(uint64_t *, pDst, 0);
2955 IEM_MC_LOCAL(uint64_t, uSrc);
2956 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2957 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2958
2959                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2960 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2961 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2963 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2964
2965 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2966 IEM_MC_PREPARE_FPU_USAGE();
2967 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2968 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2969
2970 IEM_MC_ADVANCE_RIP();
2971 IEM_MC_END();
2972 }
2973 return VINF_SUCCESS;
2974
2975 default:
2976 return IEMOP_RAISE_INVALID_OPCODE();
2977 }
2978}
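
/*
 * Editor's sketch (hypothetical reference, not the VBox assembly helper):
 * how the "evil" immediate steers pshufd - two bits per destination dword
 * select the source dword (destination and source assumed distinct here).
 */
#if 0 /* illustration only */
static void pshufd_ref(uint32_t au32Dst[4], uint32_t const au32Src[4], uint8_t bEvil)
{
    for (unsigned i = 0; i < 4; i++)
        au32Dst[i] = au32Src[(bEvil >> (i * 2)) & 3];
}
#endif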
2979
2980
2981/** Opcode 0x0f 0x71 11/2. */
2982FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
2983
2984/** Opcode 0x66 0x0f 0x71 11/2. */
2985FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);
2986
2987/** Opcode 0x0f 0x71 11/4. */
2988FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
2989
2990/** Opcode 0x66 0x0f 0x71 11/4. */
2991FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);
2992
2993/** Opcode 0x0f 0x71 11/6. */
2994FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
2995
2996/** Opcode 0x66 0x0f 0x71 11/6. */
2997FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
2998
2999
3000/** Opcode 0x0f 0x71. */
3001FNIEMOP_DEF(iemOp_Grp12)
3002{
3003 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3004 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3005 return IEMOP_RAISE_INVALID_OPCODE();
3006 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3007 {
3008 case 0: case 1: case 3: case 5: case 7:
3009 return IEMOP_RAISE_INVALID_OPCODE();
3010 case 2:
3011 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3012 {
3013 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
3014 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
3015 default: return IEMOP_RAISE_INVALID_OPCODE();
3016 }
3017 case 4:
3018 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3019 {
3020 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
3021 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
3022 default: return IEMOP_RAISE_INVALID_OPCODE();
3023 }
3024 case 6:
3025 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3026 {
3027 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
3028 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
3029 default: return IEMOP_RAISE_INVALID_OPCODE();
3030 }
3031 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3032 }
3033}
3034
3035
3036/** Opcode 0x0f 0x72 11/2. */
3037FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3038
3039/** Opcode 0x66 0x0f 0x72 11/2. */
3040FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);
3041
3042/** Opcode 0x0f 0x72 11/4. */
3043FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3044
3045/** Opcode 0x66 0x0f 0x72 11/4. */
3046FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);
3047
3048/** Opcode 0x0f 0x72 11/6. */
3049FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3050
3051/** Opcode 0x66 0x0f 0x72 11/6. */
3052FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
3053
3054
3055/** Opcode 0x0f 0x72. */
3056FNIEMOP_DEF(iemOp_Grp13)
3057{
3058 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3059 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3060 return IEMOP_RAISE_INVALID_OPCODE();
3061 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3062 {
3063 case 0: case 1: case 3: case 5: case 7:
3064 return IEMOP_RAISE_INVALID_OPCODE();
3065 case 2:
3066 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3067 {
3068 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
3069 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
3070 default: return IEMOP_RAISE_INVALID_OPCODE();
3071 }
3072 case 4:
3073 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3074 {
3075 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
3076 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
3077 default: return IEMOP_RAISE_INVALID_OPCODE();
3078 }
3079 case 6:
3080 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3081 {
3082 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
3083 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
3084 default: return IEMOP_RAISE_INVALID_OPCODE();
3085 }
3086 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3087 }
3088}
3089
3090
3091/** Opcode 0x0f 0x73 11/2. */
3092FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3093
3094/** Opcode 0x66 0x0f 0x73 11/2. */
3095FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);
3096
3097/** Opcode 0x66 0x0f 0x73 11/3. */
3098FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT
3099
3100/** Opcode 0x0f 0x73 11/6. */
3101FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3102
3103/** Opcode 0x66 0x0f 0x73 11/6. */
3104FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);
3105
3106/** Opcode 0x66 0x0f 0x73 11/7. */
3107FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
3108
3109
3110/** Opcode 0x0f 0x73. */
3111FNIEMOP_DEF(iemOp_Grp14)
3112{
3113 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3114 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3115 return IEMOP_RAISE_INVALID_OPCODE();
3116 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3117 {
3118 case 0: case 1: case 4: case 5:
3119 return IEMOP_RAISE_INVALID_OPCODE();
3120 case 2:
3121 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3122 {
3123 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
3124 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
3125 default: return IEMOP_RAISE_INVALID_OPCODE();
3126 }
3127 case 3:
3128 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3129 {
3130 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
3131 default: return IEMOP_RAISE_INVALID_OPCODE();
3132 }
3133 case 6:
3134 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3135 {
3136 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
3137 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
3138 default: return IEMOP_RAISE_INVALID_OPCODE();
3139 }
3140 case 7:
3141 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3142 {
3143 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
3144 default: return IEMOP_RAISE_INVALID_OPCODE();
3145 }
3146 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3147 }
3148}
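
/*
 * Editor's note (illustration): unlike the word/dword/qword shifts in
 * groups 12, 13 and 14, /3 (psrldq) and /7 (pslldq) above shift the whole
 * XMM register by the immediate counted in *bytes*; they only exist with
 * the 66h prefix and have no MMX form, hence the 66h-only sub-switches.
 */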
3149
3150
3151/**
3152 * Common worker for SSE2 and MMX instructions on the forms:
3153 * pxxx mm1, mm2/mem64
3154 * pxxx xmm1, xmm2/mem128
3155 *
3156 * Proper alignment of the 128-bit operand is enforced.
3157 * Exceptions type 4. SSE2 and MMX cpuid checks.
3158 */
3159FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3160{
3161 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3162 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3163 {
3164 case IEM_OP_PRF_SIZE_OP: /* SSE */
3165 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3166 {
3167 /*
3168 * Register, register.
3169 */
3170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3171 IEM_MC_BEGIN(2, 0);
3172 IEM_MC_ARG(uint128_t *, pDst, 0);
3173 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3174 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3175 IEM_MC_PREPARE_SSE_USAGE();
3176 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3177 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3178 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3179 IEM_MC_ADVANCE_RIP();
3180 IEM_MC_END();
3181 }
3182 else
3183 {
3184 /*
3185 * Register, memory.
3186 */
3187 IEM_MC_BEGIN(2, 2);
3188 IEM_MC_ARG(uint128_t *, pDst, 0);
3189 IEM_MC_LOCAL(uint128_t, uSrc);
3190 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3192
3193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3195 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3196 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3197
3198 IEM_MC_PREPARE_SSE_USAGE();
3199 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3200 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3201
3202 IEM_MC_ADVANCE_RIP();
3203 IEM_MC_END();
3204 }
3205 return VINF_SUCCESS;
3206
3207 case 0: /* MMX */
3208 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3209 {
3210 /*
3211 * Register, register.
3212 */
3213 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3214 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3216 IEM_MC_BEGIN(2, 0);
3217 IEM_MC_ARG(uint64_t *, pDst, 0);
3218 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3219 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3220 IEM_MC_PREPARE_FPU_USAGE();
3221 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3222 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3223 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3224 IEM_MC_ADVANCE_RIP();
3225 IEM_MC_END();
3226 }
3227 else
3228 {
3229 /*
3230 * Register, memory.
3231 */
3232 IEM_MC_BEGIN(2, 2);
3233 IEM_MC_ARG(uint64_t *, pDst, 0);
3234 IEM_MC_LOCAL(uint64_t, uSrc);
3235 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3236 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3237
3238 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3240 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3241 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3242
3243 IEM_MC_PREPARE_FPU_USAGE();
3244 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3245 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3246
3247 IEM_MC_ADVANCE_RIP();
3248 IEM_MC_END();
3249 }
3250 return VINF_SUCCESS;
3251
3252 default:
3253 return IEMOP_RAISE_INVALID_OPCODE();
3254 }
3255}
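
/*
 * Editor's sketch (hypothetical reference, not VBox code): the kind of
 * element-wise operation dispatched through the worker above, here the
 * pcmpeqb case - each byte becomes 0xff on equality, 0x00 otherwise.
 */
#if 0 /* illustration only */
static void pcmpeqb_ref(uint8_t *pbDst, uint8_t const *pbSrc, unsigned cbReg)
{
    for (unsigned i = 0; i < cbReg; i++) /* cbReg: 8 for MMX, 16 for SSE */
        pbDst[i] = pbDst[i] == pbSrc[i] ? UINT8_C(0xff) : UINT8_C(0x00);
}
#endif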
3256
3257
3258/** Opcode 0x0f 0x74. */
3259FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
3260{
3261 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3262 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3263}
3264
3265
3266/** Opcode 0x0f 0x75. */
3267FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
3268{
3269 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3270 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3271}
3272
3273
3274/** Opcode 0x0f 0x76. */
3275FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq__pcmpeqd_Vdq_Wdq)
3276{
3277 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3278 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3279}
3280
3281
3282/** Opcode 0x0f 0x77. */
3283FNIEMOP_STUB(iemOp_emms);
3284/** Opcode 0x0f 0x78. */
3285FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17);
3286/** Opcode 0x0f 0x79. */
3287FNIEMOP_UD_STUB(iemOp_vmwrite);
3288/** Opcode 0x0f 0x7c. */
3289FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);
3290/** Opcode 0x0f 0x7d. */
3291FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
3292
3293
3294/** Opcode 0x0f 0x7e. */
3295FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
3296{
3297 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3298 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3299 {
3300 case IEM_OP_PRF_SIZE_OP: /* SSE */
3301 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3302 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
3303 else
3304 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
3305 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3306 {
3307 /* greg, XMM */
3308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3309 IEM_MC_BEGIN(0, 1);
3310 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3311 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3312 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3313 {
3314 IEM_MC_LOCAL(uint64_t, u64Tmp);
3315 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3316 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3317 }
3318 else
3319 {
3320 IEM_MC_LOCAL(uint32_t, u32Tmp);
3321 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3322 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3323 }
3324 IEM_MC_ADVANCE_RIP();
3325 IEM_MC_END();
3326 }
3327 else
3328 {
3329 /* [mem], XMM */
3330 IEM_MC_BEGIN(0, 2);
3331 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3332 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3333                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3335 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3336 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3337 {
3338 IEM_MC_LOCAL(uint64_t, u64Tmp);
3339 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3340 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3341 }
3342 else
3343 {
3344 IEM_MC_LOCAL(uint32_t, u32Tmp);
3345 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3346 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3347 }
3348 IEM_MC_ADVANCE_RIP();
3349 IEM_MC_END();
3350 }
3351 return VINF_SUCCESS;
3352
3353 case 0: /* MMX */
3354 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3355 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3356 else
3357 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3358 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3359 {
3360 /* greg, MMX */
3361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3362 IEM_MC_BEGIN(0, 1);
3363 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3364 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3365 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3366 {
3367 IEM_MC_LOCAL(uint64_t, u64Tmp);
3368 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3369 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3370 }
3371 else
3372 {
3373 IEM_MC_LOCAL(uint32_t, u32Tmp);
3374 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3375 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3376 }
3377 IEM_MC_ADVANCE_RIP();
3378 IEM_MC_END();
3379 }
3380 else
3381 {
3382 /* [mem], MMX */
3383 IEM_MC_BEGIN(0, 2);
3384 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3385 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3386                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3388 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3389 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3390 {
3391 IEM_MC_LOCAL(uint64_t, u64Tmp);
3392 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3393 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3394 }
3395 else
3396 {
3397 IEM_MC_LOCAL(uint32_t, u32Tmp);
3398 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3399 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3400 }
3401 IEM_MC_ADVANCE_RIP();
3402 IEM_MC_END();
3403 }
3404 return VINF_SUCCESS;
3405
3406 default:
3407 return IEMOP_RAISE_INVALID_OPCODE();
3408 }
3409}
3410
3411
3412/** Opcode 0x0f 0x7f. */
3413FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
3414{
3415 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3416 bool fAligned = false;
3417 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3418 {
3419 case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
3420            fAligned = true;
3420            /* fall thru */
3421 case IEM_OP_PRF_REPZ: /* SSE unaligned */
3422 if (fAligned)
3423 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wdq,Vdq");
3424 else
3425 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wdq,Vdq");
3426 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3427 {
3428 /*
3429 * Register, register.
3430 */
3431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3432 IEM_MC_BEGIN(0, 0);
3433 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3434 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3435 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3436 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3437 IEM_MC_ADVANCE_RIP();
3438 IEM_MC_END();
3439 }
3440 else
3441 {
3442 /*
3443 * Register, memory.
3444 */
3445 IEM_MC_BEGIN(0, 2);
3446 IEM_MC_LOCAL(uint128_t, u128Tmp);
3447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3448
3449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3451 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3452 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3453
3454 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3455 if (fAligned)
3456 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3457 else
3458 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3459
3460 IEM_MC_ADVANCE_RIP();
3461 IEM_MC_END();
3462 }
3463 return VINF_SUCCESS;
3464
3465 case 0: /* MMX */
3466 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3467
3468 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3469 {
3470 /*
3471 * Register, register.
3472 */
3473 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3474 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3476 IEM_MC_BEGIN(0, 1);
3477 IEM_MC_LOCAL(uint64_t, u64Tmp);
3478 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3479 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3480 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3481 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3482 IEM_MC_ADVANCE_RIP();
3483 IEM_MC_END();
3484 }
3485 else
3486 {
3487 /*
3488 * Register, memory.
3489 */
3490 IEM_MC_BEGIN(0, 2);
3491 IEM_MC_LOCAL(uint64_t, u64Tmp);
3492 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3493
3494 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3496 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3497 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3498
3499 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3500 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3501
3502 IEM_MC_ADVANCE_RIP();
3503 IEM_MC_END();
3504 }
3505 return VINF_SUCCESS;
3506
3507 default:
3508 return IEMOP_RAISE_INVALID_OPCODE();
3509 }
3510}
3511
3512
3513
3514/** Opcode 0x0f 0x80. */
3515FNIEMOP_DEF(iemOp_jo_Jv)
3516{
3517 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
3518 IEMOP_HLP_MIN_386();
3519 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
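 /* Only the 16-bit operand size needs a separate path here: both the 32-bit
    and the (default) 64-bit operand sizes use a rel32 displacement. */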
3520 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3521 {
3522 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3524
3525 IEM_MC_BEGIN(0, 0);
3526 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3527 IEM_MC_REL_JMP_S16(i16Imm);
3528 } IEM_MC_ELSE() {
3529 IEM_MC_ADVANCE_RIP();
3530 } IEM_MC_ENDIF();
3531 IEM_MC_END();
3532 }
3533 else
3534 {
3535 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3537
3538 IEM_MC_BEGIN(0, 0);
3539 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3540 IEM_MC_REL_JMP_S32(i32Imm);
3541 } IEM_MC_ELSE() {
3542 IEM_MC_ADVANCE_RIP();
3543 } IEM_MC_ENDIF();
3544 IEM_MC_END();
3545 }
3546 return VINF_SUCCESS;
3547}
3548
3549
3550/** Opcode 0x0f 0x81. */
3551FNIEMOP_DEF(iemOp_jno_Jv)
3552{
3553 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
3554 IEMOP_HLP_MIN_386();
3555 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3556 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3557 {
3558 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3560
3561 IEM_MC_BEGIN(0, 0);
3562 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3563 IEM_MC_ADVANCE_RIP();
3564 } IEM_MC_ELSE() {
3565 IEM_MC_REL_JMP_S16(i16Imm);
3566 } IEM_MC_ENDIF();
3567 IEM_MC_END();
3568 }
3569 else
3570 {
3571 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3573
3574 IEM_MC_BEGIN(0, 0);
3575 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3576 IEM_MC_ADVANCE_RIP();
3577 } IEM_MC_ELSE() {
3578 IEM_MC_REL_JMP_S32(i32Imm);
3579 } IEM_MC_ENDIF();
3580 IEM_MC_END();
3581 }
3582 return VINF_SUCCESS;
3583}
3584
3585
3586/** Opcode 0x0f 0x82. */
3587FNIEMOP_DEF(iemOp_jc_Jv)
3588{
3589 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
3590 IEMOP_HLP_MIN_386();
3591 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3592 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3593 {
3594 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3596
3597 IEM_MC_BEGIN(0, 0);
3598 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3599 IEM_MC_REL_JMP_S16(i16Imm);
3600 } IEM_MC_ELSE() {
3601 IEM_MC_ADVANCE_RIP();
3602 } IEM_MC_ENDIF();
3603 IEM_MC_END();
3604 }
3605 else
3606 {
3607 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3609
3610 IEM_MC_BEGIN(0, 0);
3611 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3612 IEM_MC_REL_JMP_S32(i32Imm);
3613 } IEM_MC_ELSE() {
3614 IEM_MC_ADVANCE_RIP();
3615 } IEM_MC_ENDIF();
3616 IEM_MC_END();
3617 }
3618 return VINF_SUCCESS;
3619}
3620
3621
3622/** Opcode 0x0f 0x83. */
3623FNIEMOP_DEF(iemOp_jnc_Jv)
3624{
3625 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
3626 IEMOP_HLP_MIN_386();
3627 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3628 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3629 {
3630 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3632
3633 IEM_MC_BEGIN(0, 0);
3634 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3635 IEM_MC_ADVANCE_RIP();
3636 } IEM_MC_ELSE() {
3637 IEM_MC_REL_JMP_S16(i16Imm);
3638 } IEM_MC_ENDIF();
3639 IEM_MC_END();
3640 }
3641 else
3642 {
3643 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3645
3646 IEM_MC_BEGIN(0, 0);
3647 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3648 IEM_MC_ADVANCE_RIP();
3649 } IEM_MC_ELSE() {
3650 IEM_MC_REL_JMP_S32(i32Imm);
3651 } IEM_MC_ENDIF();
3652 IEM_MC_END();
3653 }
3654 return VINF_SUCCESS;
3655}
3656
3657
3658/** Opcode 0x0f 0x84. */
3659FNIEMOP_DEF(iemOp_je_Jv)
3660{
3661 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
3662 IEMOP_HLP_MIN_386();
3663 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3664 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3665 {
3666 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3668
3669 IEM_MC_BEGIN(0, 0);
3670 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3671 IEM_MC_REL_JMP_S16(i16Imm);
3672 } IEM_MC_ELSE() {
3673 IEM_MC_ADVANCE_RIP();
3674 } IEM_MC_ENDIF();
3675 IEM_MC_END();
3676 }
3677 else
3678 {
3679 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3681
3682 IEM_MC_BEGIN(0, 0);
3683 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3684 IEM_MC_REL_JMP_S32(i32Imm);
3685 } IEM_MC_ELSE() {
3686 IEM_MC_ADVANCE_RIP();
3687 } IEM_MC_ENDIF();
3688 IEM_MC_END();
3689 }
3690 return VINF_SUCCESS;
3691}
3692
3693
3694/** Opcode 0x0f 0x85. */
3695FNIEMOP_DEF(iemOp_jne_Jv)
3696{
3697 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
3698 IEMOP_HLP_MIN_386();
3699 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3700 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3701 {
3702 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3704
3705 IEM_MC_BEGIN(0, 0);
3706 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3707 IEM_MC_ADVANCE_RIP();
3708 } IEM_MC_ELSE() {
3709 IEM_MC_REL_JMP_S16(i16Imm);
3710 } IEM_MC_ENDIF();
3711 IEM_MC_END();
3712 }
3713 else
3714 {
3715 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3717
3718 IEM_MC_BEGIN(0, 0);
3719 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3720 IEM_MC_ADVANCE_RIP();
3721 } IEM_MC_ELSE() {
3722 IEM_MC_REL_JMP_S32(i32Imm);
3723 } IEM_MC_ENDIF();
3724 IEM_MC_END();
3725 }
3726 return VINF_SUCCESS;
3727}
3728
3729
3730/** Opcode 0x0f 0x86. */
3731FNIEMOP_DEF(iemOp_jbe_Jv)
3732{
3733 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
3734 IEMOP_HLP_MIN_386();
3735 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3736 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3737 {
3738 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3740
3741 IEM_MC_BEGIN(0, 0);
3742 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3743 IEM_MC_REL_JMP_S16(i16Imm);
3744 } IEM_MC_ELSE() {
3745 IEM_MC_ADVANCE_RIP();
3746 } IEM_MC_ENDIF();
3747 IEM_MC_END();
3748 }
3749 else
3750 {
3751 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3753
3754 IEM_MC_BEGIN(0, 0);
3755 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3756 IEM_MC_REL_JMP_S32(i32Imm);
3757 } IEM_MC_ELSE() {
3758 IEM_MC_ADVANCE_RIP();
3759 } IEM_MC_ENDIF();
3760 IEM_MC_END();
3761 }
3762 return VINF_SUCCESS;
3763}
3764
3765
3766/** Opcode 0x0f 0x87. */
3767FNIEMOP_DEF(iemOp_jnbe_Jv)
3768{
3769 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
3770 IEMOP_HLP_MIN_386();
3771 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3772 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3773 {
3774 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3776
3777 IEM_MC_BEGIN(0, 0);
3778 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3779 IEM_MC_ADVANCE_RIP();
3780 } IEM_MC_ELSE() {
3781 IEM_MC_REL_JMP_S16(i16Imm);
3782 } IEM_MC_ENDIF();
3783 IEM_MC_END();
3784 }
3785 else
3786 {
3787 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3789
3790 IEM_MC_BEGIN(0, 0);
3791 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3792 IEM_MC_ADVANCE_RIP();
3793 } IEM_MC_ELSE() {
3794 IEM_MC_REL_JMP_S32(i32Imm);
3795 } IEM_MC_ENDIF();
3796 IEM_MC_END();
3797 }
3798 return VINF_SUCCESS;
3799}
3800
3801
3802/** Opcode 0x0f 0x88. */
3803FNIEMOP_DEF(iemOp_js_Jv)
3804{
3805 IEMOP_MNEMONIC(js_Jv, "js Jv");
3806 IEMOP_HLP_MIN_386();
3807 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3808 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3809 {
3810 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3812
3813 IEM_MC_BEGIN(0, 0);
3814 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3815 IEM_MC_REL_JMP_S16(i16Imm);
3816 } IEM_MC_ELSE() {
3817 IEM_MC_ADVANCE_RIP();
3818 } IEM_MC_ENDIF();
3819 IEM_MC_END();
3820 }
3821 else
3822 {
3823 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3825
3826 IEM_MC_BEGIN(0, 0);
3827 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3828 IEM_MC_REL_JMP_S32(i32Imm);
3829 } IEM_MC_ELSE() {
3830 IEM_MC_ADVANCE_RIP();
3831 } IEM_MC_ENDIF();
3832 IEM_MC_END();
3833 }
3834 return VINF_SUCCESS;
3835}
3836
3837
3838/** Opcode 0x0f 0x89. */
3839FNIEMOP_DEF(iemOp_jns_Jv)
3840{
3841 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
3842 IEMOP_HLP_MIN_386();
3843 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3844 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3845 {
3846 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3847 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3848
3849 IEM_MC_BEGIN(0, 0);
3850 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3851 IEM_MC_ADVANCE_RIP();
3852 } IEM_MC_ELSE() {
3853 IEM_MC_REL_JMP_S16(i16Imm);
3854 } IEM_MC_ENDIF();
3855 IEM_MC_END();
3856 }
3857 else
3858 {
3859 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3861
3862 IEM_MC_BEGIN(0, 0);
3863 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3864 IEM_MC_ADVANCE_RIP();
3865 } IEM_MC_ELSE() {
3866 IEM_MC_REL_JMP_S32(i32Imm);
3867 } IEM_MC_ENDIF();
3868 IEM_MC_END();
3869 }
3870 return VINF_SUCCESS;
3871}
3872
3873
3874/** Opcode 0x0f 0x8a. */
3875FNIEMOP_DEF(iemOp_jp_Jv)
3876{
3877 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
3878 IEMOP_HLP_MIN_386();
3879 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3880 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3881 {
3882 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3884
3885 IEM_MC_BEGIN(0, 0);
3886 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3887 IEM_MC_REL_JMP_S16(i16Imm);
3888 } IEM_MC_ELSE() {
3889 IEM_MC_ADVANCE_RIP();
3890 } IEM_MC_ENDIF();
3891 IEM_MC_END();
3892 }
3893 else
3894 {
3895 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3897
3898 IEM_MC_BEGIN(0, 0);
3899 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3900 IEM_MC_REL_JMP_S32(i32Imm);
3901 } IEM_MC_ELSE() {
3902 IEM_MC_ADVANCE_RIP();
3903 } IEM_MC_ENDIF();
3904 IEM_MC_END();
3905 }
3906 return VINF_SUCCESS;
3907}
3908
3909
3910/** Opcode 0x0f 0x8b. */
3911FNIEMOP_DEF(iemOp_jnp_Jv)
3912{
3913 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
3914 IEMOP_HLP_MIN_386();
3915 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3916 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3917 {
3918 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3920
3921 IEM_MC_BEGIN(0, 0);
3922 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3923 IEM_MC_ADVANCE_RIP();
3924 } IEM_MC_ELSE() {
3925 IEM_MC_REL_JMP_S16(i16Imm);
3926 } IEM_MC_ENDIF();
3927 IEM_MC_END();
3928 }
3929 else
3930 {
3931 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3933
3934 IEM_MC_BEGIN(0, 0);
3935 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3936 IEM_MC_ADVANCE_RIP();
3937 } IEM_MC_ELSE() {
3938 IEM_MC_REL_JMP_S32(i32Imm);
3939 } IEM_MC_ENDIF();
3940 IEM_MC_END();
3941 }
3942 return VINF_SUCCESS;
3943}
3944
3945
3946/** Opcode 0x0f 0x8c. */
3947FNIEMOP_DEF(iemOp_jl_Jv)
3948{
3949 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
3950 IEMOP_HLP_MIN_386();
3951 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3952 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3953 {
3954 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3956
3957 IEM_MC_BEGIN(0, 0);
3958 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3959 IEM_MC_REL_JMP_S16(i16Imm);
3960 } IEM_MC_ELSE() {
3961 IEM_MC_ADVANCE_RIP();
3962 } IEM_MC_ENDIF();
3963 IEM_MC_END();
3964 }
3965 else
3966 {
3967 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3969
3970 IEM_MC_BEGIN(0, 0);
3971 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3972 IEM_MC_REL_JMP_S32(i32Imm);
3973 } IEM_MC_ELSE() {
3974 IEM_MC_ADVANCE_RIP();
3975 } IEM_MC_ENDIF();
3976 IEM_MC_END();
3977 }
3978 return VINF_SUCCESS;
3979}
3980
3981
3982/** Opcode 0x0f 0x8d. */
3983FNIEMOP_DEF(iemOp_jnl_Jv)
3984{
3985 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
3986 IEMOP_HLP_MIN_386();
3987 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3988 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3989 {
3990 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3992
3993 IEM_MC_BEGIN(0, 0);
3994 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3995 IEM_MC_ADVANCE_RIP();
3996 } IEM_MC_ELSE() {
3997 IEM_MC_REL_JMP_S16(i16Imm);
3998 } IEM_MC_ENDIF();
3999 IEM_MC_END();
4000 }
4001 else
4002 {
4003 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4005
4006 IEM_MC_BEGIN(0, 0);
4007 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4008 IEM_MC_ADVANCE_RIP();
4009 } IEM_MC_ELSE() {
4010 IEM_MC_REL_JMP_S32(i32Imm);
4011 } IEM_MC_ENDIF();
4012 IEM_MC_END();
4013 }
4014 return VINF_SUCCESS;
4015}
4016
4017
4018/** Opcode 0x0f 0x8e. */
4019FNIEMOP_DEF(iemOp_jle_Jv)
4020{
4021 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4022 IEMOP_HLP_MIN_386();
4023 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4024 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4025 {
4026 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4028
4029 IEM_MC_BEGIN(0, 0);
4030 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4031 IEM_MC_REL_JMP_S16(i16Imm);
4032 } IEM_MC_ELSE() {
4033 IEM_MC_ADVANCE_RIP();
4034 } IEM_MC_ENDIF();
4035 IEM_MC_END();
4036 }
4037 else
4038 {
4039 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4041
4042 IEM_MC_BEGIN(0, 0);
4043 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4044 IEM_MC_REL_JMP_S32(i32Imm);
4045 } IEM_MC_ELSE() {
4046 IEM_MC_ADVANCE_RIP();
4047 } IEM_MC_ENDIF();
4048 IEM_MC_END();
4049 }
4050 return VINF_SUCCESS;
4051}
4052
4053
4054/** Opcode 0x0f 0x8f. */
4055FNIEMOP_DEF(iemOp_jnle_Jv)
4056{
4057 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4058 IEMOP_HLP_MIN_386();
4059 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4060 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4061 {
4062 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4064
4065 IEM_MC_BEGIN(0, 0);
4066 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4067 IEM_MC_ADVANCE_RIP();
4068 } IEM_MC_ELSE() {
4069 IEM_MC_REL_JMP_S16(i16Imm);
4070 } IEM_MC_ENDIF();
4071 IEM_MC_END();
4072 }
4073 else
4074 {
4075 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4077
4078 IEM_MC_BEGIN(0, 0);
4079 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4080 IEM_MC_ADVANCE_RIP();
4081 } IEM_MC_ELSE() {
4082 IEM_MC_REL_JMP_S32(i32Imm);
4083 } IEM_MC_ENDIF();
4084 IEM_MC_END();
4085 }
4086 return VINF_SUCCESS;
4087}
4088
4089
4090/** Opcode 0x0f 0x90. */
4091FNIEMOP_DEF(iemOp_seto_Eb)
4092{
4093 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4094 IEMOP_HLP_MIN_386();
4095 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4096
4097 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4098 * any way. AMD says it's "unused", whatever that means. We're
4099 * ignoring for now. */
4100 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4101 {
4102 /* register target */
4103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4104 IEM_MC_BEGIN(0, 0);
4105 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4106 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4107 } IEM_MC_ELSE() {
4108 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4109 } IEM_MC_ENDIF();
4110 IEM_MC_ADVANCE_RIP();
4111 IEM_MC_END();
4112 }
4113 else
4114 {
4115 /* memory target */
4116 IEM_MC_BEGIN(0, 1);
4117 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4118 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4120 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4121 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4122 } IEM_MC_ELSE() {
4123 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4124 } IEM_MC_ENDIF();
4125 IEM_MC_ADVANCE_RIP();
4126 IEM_MC_END();
4127 }
4128 return VINF_SUCCESS;
4129}
4130
4131
4132/** Opcode 0x0f 0x91. */
4133FNIEMOP_DEF(iemOp_setno_Eb)
4134{
4135 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4136 IEMOP_HLP_MIN_386();
4137 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4138
4139 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4140 * any way. AMD says it's "unused", whatever that means. We're
4141 * ignoring for now. */
4142 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4143 {
4144 /* register target */
4145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4146 IEM_MC_BEGIN(0, 0);
4147 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4148 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4149 } IEM_MC_ELSE() {
4150 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4151 } IEM_MC_ENDIF();
4152 IEM_MC_ADVANCE_RIP();
4153 IEM_MC_END();
4154 }
4155 else
4156 {
4157 /* memory target */
4158 IEM_MC_BEGIN(0, 1);
4159 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4162 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4163 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4164 } IEM_MC_ELSE() {
4165 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4166 } IEM_MC_ENDIF();
4167 IEM_MC_ADVANCE_RIP();
4168 IEM_MC_END();
4169 }
4170 return VINF_SUCCESS;
4171}
4172
4173
4174/** Opcode 0x0f 0x92. */
4175FNIEMOP_DEF(iemOp_setc_Eb)
4176{
4177 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4178 IEMOP_HLP_MIN_386();
4179 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4180
4181 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4182 * any way. AMD says it's "unused", whatever that means. We're
4183 * ignoring for now. */
4184 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4185 {
4186 /* register target */
4187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4188 IEM_MC_BEGIN(0, 0);
4189 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4190 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4191 } IEM_MC_ELSE() {
4192 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4193 } IEM_MC_ENDIF();
4194 IEM_MC_ADVANCE_RIP();
4195 IEM_MC_END();
4196 }
4197 else
4198 {
4199 /* memory target */
4200 IEM_MC_BEGIN(0, 1);
4201 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4204 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4205 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4206 } IEM_MC_ELSE() {
4207 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4208 } IEM_MC_ENDIF();
4209 IEM_MC_ADVANCE_RIP();
4210 IEM_MC_END();
4211 }
4212 return VINF_SUCCESS;
4213}
4214
4215
4216/** Opcode 0x0f 0x93. */
4217FNIEMOP_DEF(iemOp_setnc_Eb)
4218{
4219 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4220 IEMOP_HLP_MIN_386();
4221 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4222
4223 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4224 * any way. AMD says it's "unused", whatever that means. We're
4225 * ignoring for now. */
4226 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4227 {
4228 /* register target */
4229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4230 IEM_MC_BEGIN(0, 0);
4231 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4232 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4233 } IEM_MC_ELSE() {
4234 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4235 } IEM_MC_ENDIF();
4236 IEM_MC_ADVANCE_RIP();
4237 IEM_MC_END();
4238 }
4239 else
4240 {
4241 /* memory target */
4242 IEM_MC_BEGIN(0, 1);
4243 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4244 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4246 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4247 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4248 } IEM_MC_ELSE() {
4249 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4250 } IEM_MC_ENDIF();
4251 IEM_MC_ADVANCE_RIP();
4252 IEM_MC_END();
4253 }
4254 return VINF_SUCCESS;
4255}
4256
4257
4258/** Opcode 0x0f 0x94. */
4259FNIEMOP_DEF(iemOp_sete_Eb)
4260{
4261 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4262 IEMOP_HLP_MIN_386();
4263 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4264
4265 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4266 * any way. AMD says it's "unused", whatever that means. We're
4267 * ignoring for now. */
4268 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4269 {
4270 /* register target */
4271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4272 IEM_MC_BEGIN(0, 0);
4273 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4274 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4275 } IEM_MC_ELSE() {
4276 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4277 } IEM_MC_ENDIF();
4278 IEM_MC_ADVANCE_RIP();
4279 IEM_MC_END();
4280 }
4281 else
4282 {
4283 /* memory target */
4284 IEM_MC_BEGIN(0, 1);
4285 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4286 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4288 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4289 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4290 } IEM_MC_ELSE() {
4291 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4292 } IEM_MC_ENDIF();
4293 IEM_MC_ADVANCE_RIP();
4294 IEM_MC_END();
4295 }
4296 return VINF_SUCCESS;
4297}
4298
4299
4300/** Opcode 0x0f 0x95. */
4301FNIEMOP_DEF(iemOp_setne_Eb)
4302{
4303 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4304 IEMOP_HLP_MIN_386();
4305 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4306
4307 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4308 * any way. AMD says it's "unused", whatever that means. We're
4309 * ignoring for now. */
4310 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4311 {
4312 /* register target */
4313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4314 IEM_MC_BEGIN(0, 0);
4315 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4316 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4317 } IEM_MC_ELSE() {
4318 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4319 } IEM_MC_ENDIF();
4320 IEM_MC_ADVANCE_RIP();
4321 IEM_MC_END();
4322 }
4323 else
4324 {
4325 /* memory target */
4326 IEM_MC_BEGIN(0, 1);
4327 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4330 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4331 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4332 } IEM_MC_ELSE() {
4333 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4334 } IEM_MC_ENDIF();
4335 IEM_MC_ADVANCE_RIP();
4336 IEM_MC_END();
4337 }
4338 return VINF_SUCCESS;
4339}
4340
4341
4342/** Opcode 0x0f 0x96. */
4343FNIEMOP_DEF(iemOp_setbe_Eb)
4344{
4345 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4346 IEMOP_HLP_MIN_386();
4347 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4348
4349 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4350 * any way. AMD says it's "unused", whatever that means. We're
4351 * ignoring for now. */
4352 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4353 {
4354 /* register target */
4355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4356 IEM_MC_BEGIN(0, 0);
4357 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4358 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4359 } IEM_MC_ELSE() {
4360 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4361 } IEM_MC_ENDIF();
4362 IEM_MC_ADVANCE_RIP();
4363 IEM_MC_END();
4364 }
4365 else
4366 {
4367 /* memory target */
4368 IEM_MC_BEGIN(0, 1);
4369 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4370 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4372 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4373 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4374 } IEM_MC_ELSE() {
4375 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4376 } IEM_MC_ENDIF();
4377 IEM_MC_ADVANCE_RIP();
4378 IEM_MC_END();
4379 }
4380 return VINF_SUCCESS;
4381}
4382
4383
4384/** Opcode 0x0f 0x97. */
4385FNIEMOP_DEF(iemOp_setnbe_Eb)
4386{
4387 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4388 IEMOP_HLP_MIN_386();
4389 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4390
4391 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4392 * any way. AMD says it's "unused", whatever that means. We're
4393 * ignoring for now. */
4394 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4395 {
4396 /* register target */
4397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4398 IEM_MC_BEGIN(0, 0);
4399 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4400 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4401 } IEM_MC_ELSE() {
4402 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4403 } IEM_MC_ENDIF();
4404 IEM_MC_ADVANCE_RIP();
4405 IEM_MC_END();
4406 }
4407 else
4408 {
4409 /* memory target */
4410 IEM_MC_BEGIN(0, 1);
4411 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4412 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4414 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4415 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4416 } IEM_MC_ELSE() {
4417 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4418 } IEM_MC_ENDIF();
4419 IEM_MC_ADVANCE_RIP();
4420 IEM_MC_END();
4421 }
4422 return VINF_SUCCESS;
4423}
4424
4425
4426/** Opcode 0x0f 0x98. */
4427FNIEMOP_DEF(iemOp_sets_Eb)
4428{
4429 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
4430 IEMOP_HLP_MIN_386();
4431 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4432
4433 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4434 * any way. AMD says it's "unused", whatever that means. We're
4435 * ignoring for now. */
4436 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4437 {
4438 /* register target */
4439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4440 IEM_MC_BEGIN(0, 0);
4441 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4442 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4443 } IEM_MC_ELSE() {
4444 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4445 } IEM_MC_ENDIF();
4446 IEM_MC_ADVANCE_RIP();
4447 IEM_MC_END();
4448 }
4449 else
4450 {
4451 /* memory target */
4452 IEM_MC_BEGIN(0, 1);
4453 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4454 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4456 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4457 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4458 } IEM_MC_ELSE() {
4459 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4460 } IEM_MC_ENDIF();
4461 IEM_MC_ADVANCE_RIP();
4462 IEM_MC_END();
4463 }
4464 return VINF_SUCCESS;
4465}
4466
4467
4468/** Opcode 0x0f 0x99. */
4469FNIEMOP_DEF(iemOp_setns_Eb)
4470{
4471 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
4472 IEMOP_HLP_MIN_386();
4473 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4474
4475 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4476 * any way. AMD says it's "unused", whatever that means. We're
4477 * ignoring for now. */
4478 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4479 {
4480 /* register target */
4481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4482 IEM_MC_BEGIN(0, 0);
4483 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4484 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4485 } IEM_MC_ELSE() {
4486 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4487 } IEM_MC_ENDIF();
4488 IEM_MC_ADVANCE_RIP();
4489 IEM_MC_END();
4490 }
4491 else
4492 {
4493 /* memory target */
4494 IEM_MC_BEGIN(0, 1);
4495 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4496 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4498 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4499 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4500 } IEM_MC_ELSE() {
4501 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4502 } IEM_MC_ENDIF();
4503 IEM_MC_ADVANCE_RIP();
4504 IEM_MC_END();
4505 }
4506 return VINF_SUCCESS;
4507}
4508
4509
4510/** Opcode 0x0f 0x9a. */
4511FNIEMOP_DEF(iemOp_setp_Eb)
4512{
4513 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
4514 IEMOP_HLP_MIN_386();
4515 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4516
4517 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4518 * any way. AMD says it's "unused", whatever that means. We're
4519 * ignoring for now. */
4520 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4521 {
4522 /* register target */
4523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4524 IEM_MC_BEGIN(0, 0);
4525 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4526 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4527 } IEM_MC_ELSE() {
4528 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4529 } IEM_MC_ENDIF();
4530 IEM_MC_ADVANCE_RIP();
4531 IEM_MC_END();
4532 }
4533 else
4534 {
4535 /* memory target */
4536 IEM_MC_BEGIN(0, 1);
4537 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4538 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4540 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4541 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4542 } IEM_MC_ELSE() {
4543 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4544 } IEM_MC_ENDIF();
4545 IEM_MC_ADVANCE_RIP();
4546 IEM_MC_END();
4547 }
4548 return VINF_SUCCESS;
4549}
4550
4551
4552/** Opcode 0x0f 0x9b. */
4553FNIEMOP_DEF(iemOp_setnp_Eb)
4554{
4555 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
4556 IEMOP_HLP_MIN_386();
4557 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4558
4559 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4560 * any way. AMD says it's "unused", whatever that means. We're
4561 * ignoring for now. */
4562 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4563 {
4564 /* register target */
4565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4566 IEM_MC_BEGIN(0, 0);
4567 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4568 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4569 } IEM_MC_ELSE() {
4570 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4571 } IEM_MC_ENDIF();
4572 IEM_MC_ADVANCE_RIP();
4573 IEM_MC_END();
4574 }
4575 else
4576 {
4577 /* memory target */
4578 IEM_MC_BEGIN(0, 1);
4579 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4580 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4582 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4583 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4584 } IEM_MC_ELSE() {
4585 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4586 } IEM_MC_ENDIF();
4587 IEM_MC_ADVANCE_RIP();
4588 IEM_MC_END();
4589 }
4590 return VINF_SUCCESS;
4591}
4592
4593
4594/** Opcode 0x0f 0x9c. */
4595FNIEMOP_DEF(iemOp_setl_Eb)
4596{
4597 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
4598 IEMOP_HLP_MIN_386();
4599 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4600
4601 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4602 * any way. AMD says it's "unused", whatever that means. We're
4603 * ignoring for now. */
4604 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4605 {
4606 /* register target */
4607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4608 IEM_MC_BEGIN(0, 0);
4609 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4610 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4611 } IEM_MC_ELSE() {
4612 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4613 } IEM_MC_ENDIF();
4614 IEM_MC_ADVANCE_RIP();
4615 IEM_MC_END();
4616 }
4617 else
4618 {
4619 /* memory target */
4620 IEM_MC_BEGIN(0, 1);
4621 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4622 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4624 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4625 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4626 } IEM_MC_ELSE() {
4627 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4628 } IEM_MC_ENDIF();
4629 IEM_MC_ADVANCE_RIP();
4630 IEM_MC_END();
4631 }
4632 return VINF_SUCCESS;
4633}
4634
4635
4636/** Opcode 0x0f 0x9d. */
4637FNIEMOP_DEF(iemOp_setnl_Eb)
4638{
4639 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
4640 IEMOP_HLP_MIN_386();
4641 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4642
4643 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4644 * any way. AMD says it's "unused", whatever that means. We're
4645 * ignoring for now. */
4646 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4647 {
4648 /* register target */
4649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4650 IEM_MC_BEGIN(0, 0);
4651 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4652 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4653 } IEM_MC_ELSE() {
4654 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4655 } IEM_MC_ENDIF();
4656 IEM_MC_ADVANCE_RIP();
4657 IEM_MC_END();
4658 }
4659 else
4660 {
4661 /* memory target */
4662 IEM_MC_BEGIN(0, 1);
4663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4664 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4666 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4667 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4668 } IEM_MC_ELSE() {
4669 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4670 } IEM_MC_ENDIF();
4671 IEM_MC_ADVANCE_RIP();
4672 IEM_MC_END();
4673 }
4674 return VINF_SUCCESS;
4675}
4676
4677
4678/** Opcode 0x0f 0x9e. */
4679FNIEMOP_DEF(iemOp_setle_Eb)
4680{
4681 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
4682 IEMOP_HLP_MIN_386();
4683 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4684
4685 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4686 * any way. AMD says it's "unused", whatever that means. We're
4687 * ignoring for now. */
4688 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4689 {
4690 /* register target */
4691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4692 IEM_MC_BEGIN(0, 0);
4693 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4694 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4695 } IEM_MC_ELSE() {
4696 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4697 } IEM_MC_ENDIF();
4698 IEM_MC_ADVANCE_RIP();
4699 IEM_MC_END();
4700 }
4701 else
4702 {
4703 /* memory target */
4704 IEM_MC_BEGIN(0, 1);
4705 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4706 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4708 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4709 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4710 } IEM_MC_ELSE() {
4711 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4712 } IEM_MC_ENDIF();
4713 IEM_MC_ADVANCE_RIP();
4714 IEM_MC_END();
4715 }
4716 return VINF_SUCCESS;
4717}
4718
4719
4720/** Opcode 0x0f 0x9f. */
4721FNIEMOP_DEF(iemOp_setnle_Eb)
4722{
4723 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
4724 IEMOP_HLP_MIN_386();
4725 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4726
4727 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4728 * any way. AMD says it's "unused", whatever that means. We're
4729 * ignoring for now. */
4730 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4731 {
4732 /* register target */
4733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4734 IEM_MC_BEGIN(0, 0);
4735 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4736 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4737 } IEM_MC_ELSE() {
4738 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4739 } IEM_MC_ENDIF();
4740 IEM_MC_ADVANCE_RIP();
4741 IEM_MC_END();
4742 }
4743 else
4744 {
4745 /* memory target */
4746 IEM_MC_BEGIN(0, 1);
4747 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4750 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4751 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4752 } IEM_MC_ELSE() {
4753 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4754 } IEM_MC_ENDIF();
4755 IEM_MC_ADVANCE_RIP();
4756 IEM_MC_END();
4757 }
4758 return VINF_SUCCESS;
4759}
4760
4761
4762/**
4763 * Common 'push segment-register' helper.
4764 */
4765FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
4766{
4767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4768 if (iReg < X86_SREG_FS)
4769 IEMOP_HLP_NO_64BIT();
4770 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4771
4772 switch (pVCpu->iem.s.enmEffOpSize)
4773 {
4774 case IEMMODE_16BIT:
4775 IEM_MC_BEGIN(0, 1);
4776 IEM_MC_LOCAL(uint16_t, u16Value);
4777 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
4778 IEM_MC_PUSH_U16(u16Value);
4779 IEM_MC_ADVANCE_RIP();
4780 IEM_MC_END();
4781 break;
4782
4783 case IEMMODE_32BIT:
4784 IEM_MC_BEGIN(0, 1);
4785 IEM_MC_LOCAL(uint32_t, u32Value);
4786 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
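 /* Assumption: a 32-bit push of a segment register only writes the low word
    on current CPUs, which is presumably why the dedicated
    IEM_MC_PUSH_U32_SREG exists instead of a plain IEM_MC_PUSH_U32. */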
4787 IEM_MC_PUSH_U32_SREG(u32Value);
4788 IEM_MC_ADVANCE_RIP();
4789 IEM_MC_END();
4790 break;
4791
4792 case IEMMODE_64BIT:
4793 IEM_MC_BEGIN(0, 1);
4794 IEM_MC_LOCAL(uint64_t, u64Value);
4795 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
4796 IEM_MC_PUSH_U64(u64Value);
4797 IEM_MC_ADVANCE_RIP();
4798 IEM_MC_END();
4799 break;
4800 }
4801
4802 return VINF_SUCCESS;
4803}
4804
4805
4806/** Opcode 0x0f 0xa0. */
4807FNIEMOP_DEF(iemOp_push_fs)
4808{
4809 IEMOP_MNEMONIC(push_fs, "push fs");
4810 IEMOP_HLP_MIN_386();
4811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4812 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
4813}
4814
4815
4816/** Opcode 0x0f 0xa1. */
4817FNIEMOP_DEF(iemOp_pop_fs)
4818{
4819 IEMOP_MNEMONIC(pop_fs, "pop fs");
4820 IEMOP_HLP_MIN_386();
4821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4822 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
4823}
4824
4825
4826/** Opcode 0x0f 0xa2. */
4827FNIEMOP_DEF(iemOp_cpuid)
4828{
4829 IEMOP_MNEMONIC(cpuid, "cpuid");
4830 IEMOP_HLP_MIN_486(); /* not all 486es. */
4831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4832 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
4833}
4834
4835
4836/**
4837 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4838 * iemOp_bts_Ev_Gv.
4839 */
4840FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4841{
4842 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4843 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4844
4845 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4846 {
4847 /* register destination. */
4848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4849 switch (pVCpu->iem.s.enmEffOpSize)
4850 {
4851 case IEMMODE_16BIT:
4852 IEM_MC_BEGIN(3, 0);
4853 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4854 IEM_MC_ARG(uint16_t, u16Src, 1);
4855 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4856
4857 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4858 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4859 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4860 IEM_MC_REF_EFLAGS(pEFlags);
4861 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4862
4863 IEM_MC_ADVANCE_RIP();
4864 IEM_MC_END();
4865 return VINF_SUCCESS;
4866
4867 case IEMMODE_32BIT:
4868 IEM_MC_BEGIN(3, 0);
4869 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4870 IEM_MC_ARG(uint32_t, u32Src, 1);
4871 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4872
4873 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4874 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4875 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4876 IEM_MC_REF_EFLAGS(pEFlags);
4877 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4878
4879 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4880 IEM_MC_ADVANCE_RIP();
4881 IEM_MC_END();
4882 return VINF_SUCCESS;
4883
4884 case IEMMODE_64BIT:
4885 IEM_MC_BEGIN(3, 0);
4886 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4887 IEM_MC_ARG(uint64_t, u64Src, 1);
4888 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4889
4890 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4891 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4892 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4893 IEM_MC_REF_EFLAGS(pEFlags);
4894 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4895
4896 IEM_MC_ADVANCE_RIP();
4897 IEM_MC_END();
4898 return VINF_SUCCESS;
4899
4900 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4901 }
4902 }
4903 else
4904 {
4905 /* memory destination. */
4906
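 /* BT only reads the operand; mapping it R/W would make the emulated access
    fault on write-protected memory where the real instruction would not. */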
4907 uint32_t fAccess;
4908 if (pImpl->pfnLockedU16)
4909 fAccess = IEM_ACCESS_DATA_RW;
4910 else /* BT */
4911 fAccess = IEM_ACCESS_DATA_R;
4912
4913 /** @todo test negative bit offsets! */
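 /* The register operand holds a signed bit offset: SAR by 4/5/6 gives the
    16/32/64-bit unit index, SHL by 1/2/3 turns that into a byte displacement
    added to the effective address, and the AND keeps the bit index within
    the unit. */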
4914 switch (pVCpu->iem.s.enmEffOpSize)
4915 {
4916 case IEMMODE_16BIT:
4917 IEM_MC_BEGIN(3, 2);
4918 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4919 IEM_MC_ARG(uint16_t, u16Src, 1);
4920 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4921 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4922 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4923
4924 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4925 if (pImpl->pfnLockedU16)
4926 IEMOP_HLP_DONE_DECODING();
4927 else
4928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4929 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4930 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4931 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4932 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4933 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
4934 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4935 IEM_MC_FETCH_EFLAGS(EFlags);
4936
4937 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4938 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
4939 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4940 else
4941 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4942 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
4943
4944 IEM_MC_COMMIT_EFLAGS(EFlags);
4945 IEM_MC_ADVANCE_RIP();
4946 IEM_MC_END();
4947 return VINF_SUCCESS;
4948
4949 case IEMMODE_32BIT:
4950 IEM_MC_BEGIN(3, 2);
4951 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4952 IEM_MC_ARG(uint32_t, u32Src, 1);
4953 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4954 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4955 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4956
4957 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4958 if (pImpl->pfnLockedU16)
4959 IEMOP_HLP_DONE_DECODING();
4960 else
4961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4962 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4963 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4964 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4965 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4966 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4967 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4968 IEM_MC_FETCH_EFLAGS(EFlags);
4969
4970 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4971 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
4972 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4973 else
4974 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4975 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
4976
4977 IEM_MC_COMMIT_EFLAGS(EFlags);
4978 IEM_MC_ADVANCE_RIP();
4979 IEM_MC_END();
4980 return VINF_SUCCESS;
4981
4982 case IEMMODE_64BIT:
4983 IEM_MC_BEGIN(3, 2);
4984 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4985 IEM_MC_ARG(uint64_t, u64Src, 1);
4986 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4987 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4988 IEM_MC_LOCAL(int64_t, i64AddrAdj);
4989
4990 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4991 if (pImpl->pfnLockedU16)
4992 IEMOP_HLP_DONE_DECODING();
4993 else
4994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4995 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4996 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
4997 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
4998 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
4999 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5000 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5001 IEM_MC_FETCH_EFLAGS(EFlags);
5002
5003 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5004 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5005 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5006 else
5007 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5008 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5009
5010 IEM_MC_COMMIT_EFLAGS(EFlags);
5011 IEM_MC_ADVANCE_RIP();
5012 IEM_MC_END();
5013 return VINF_SUCCESS;
5014
5015 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5016 }
5017 }
5018}
5019
5020
5021/** Opcode 0x0f 0xa3. */
5022FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5023{
5024 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5025 IEMOP_HLP_MIN_386();
5026 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5027}
5028
5029
5030/**
5031 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5032 */
5033FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5034{
5035 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5036 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5037
5038 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5039 {
5040 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5042
5043 switch (pVCpu->iem.s.enmEffOpSize)
5044 {
5045 case IEMMODE_16BIT:
5046 IEM_MC_BEGIN(4, 0);
5047 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5048 IEM_MC_ARG(uint16_t, u16Src, 1);
5049 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5050 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5051
5052 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5053 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5054 IEM_MC_REF_EFLAGS(pEFlags);
5055 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5056
5057 IEM_MC_ADVANCE_RIP();
5058 IEM_MC_END();
5059 return VINF_SUCCESS;
5060
5061 case IEMMODE_32BIT:
5062 IEM_MC_BEGIN(4, 0);
5063 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5064 IEM_MC_ARG(uint32_t, u32Src, 1);
5065 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5066 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5067
5068 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5069 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5070 IEM_MC_REF_EFLAGS(pEFlags);
5071 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5072
5073 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5074 IEM_MC_ADVANCE_RIP();
5075 IEM_MC_END();
5076 return VINF_SUCCESS;
5077
5078 case IEMMODE_64BIT:
5079 IEM_MC_BEGIN(4, 0);
5080 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5081 IEM_MC_ARG(uint64_t, u64Src, 1);
5082 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5083 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5084
5085 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5086 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5087 IEM_MC_REF_EFLAGS(pEFlags);
5088 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5089
5090 IEM_MC_ADVANCE_RIP();
5091 IEM_MC_END();
5092 return VINF_SUCCESS;
5093
5094 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5095 }
5096 }
5097 else
5098 {
5099 switch (pVCpu->iem.s.enmEffOpSize)
5100 {
5101 case IEMMODE_16BIT:
5102 IEM_MC_BEGIN(4, 2);
5103 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5104 IEM_MC_ARG(uint16_t, u16Src, 1);
5105 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5106 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5107 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5108
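 /* The Ib shift count follows the ModR/M operand bytes; cbImm=1 lets the
    effective-address calculation account for it (RIP-relative displacements
    are relative to the end of the instruction). */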
5109 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5110 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5111 IEM_MC_ASSIGN(cShiftArg, cShift);
5112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5113 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5114 IEM_MC_FETCH_EFLAGS(EFlags);
5115 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5116 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5117
5118 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5119 IEM_MC_COMMIT_EFLAGS(EFlags);
5120 IEM_MC_ADVANCE_RIP();
5121 IEM_MC_END();
5122 return VINF_SUCCESS;
5123
5124 case IEMMODE_32BIT:
5125 IEM_MC_BEGIN(4, 2);
5126 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5127 IEM_MC_ARG(uint32_t, u32Src, 1);
5128 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5129 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5130 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5131
5132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5133 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5134 IEM_MC_ASSIGN(cShiftArg, cShift);
5135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5136 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5137 IEM_MC_FETCH_EFLAGS(EFlags);
5138 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5139 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5140
5141 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5142 IEM_MC_COMMIT_EFLAGS(EFlags);
5143 IEM_MC_ADVANCE_RIP();
5144 IEM_MC_END();
5145 return VINF_SUCCESS;
5146
5147 case IEMMODE_64BIT:
5148 IEM_MC_BEGIN(4, 2);
5149 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5150 IEM_MC_ARG(uint64_t, u64Src, 1);
5151 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5152 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5153 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5154
5155 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5156 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5157 IEM_MC_ASSIGN(cShiftArg, cShift);
5158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5159 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5160 IEM_MC_FETCH_EFLAGS(EFlags);
5161 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5162 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5163
5164 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5165 IEM_MC_COMMIT_EFLAGS(EFlags);
5166 IEM_MC_ADVANCE_RIP();
5167 IEM_MC_END();
5168 return VINF_SUCCESS;
5169
5170 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5171 }
5172 }
5173}
5174
5175
5176/**
5177 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5178 */
5179FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5180{
5181 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5182 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5183
5184 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5185 {
5186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5187
5188 switch (pVCpu->iem.s.enmEffOpSize)
5189 {
5190 case IEMMODE_16BIT:
5191 IEM_MC_BEGIN(4, 0);
5192 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5193 IEM_MC_ARG(uint16_t, u16Src, 1);
5194 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5195 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5196
5197 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5198 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5199 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5200 IEM_MC_REF_EFLAGS(pEFlags);
5201 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5202
5203 IEM_MC_ADVANCE_RIP();
5204 IEM_MC_END();
5205 return VINF_SUCCESS;
5206
5207 case IEMMODE_32BIT:
5208 IEM_MC_BEGIN(4, 0);
5209 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5210 IEM_MC_ARG(uint32_t, u32Src, 1);
5211 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5212 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5213
5214 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5215 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5216 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5217 IEM_MC_REF_EFLAGS(pEFlags);
5218 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5219
5220 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5221 IEM_MC_ADVANCE_RIP();
5222 IEM_MC_END();
5223 return VINF_SUCCESS;
5224
5225 case IEMMODE_64BIT:
5226 IEM_MC_BEGIN(4, 0);
5227 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5228 IEM_MC_ARG(uint64_t, u64Src, 1);
5229 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5230 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5231
5232 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5233 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5234 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5235 IEM_MC_REF_EFLAGS(pEFlags);
5236 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5237
5238 IEM_MC_ADVANCE_RIP();
5239 IEM_MC_END();
5240 return VINF_SUCCESS;
5241
5242 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5243 }
5244 }
5245 else
5246 {
5247 switch (pVCpu->iem.s.enmEffOpSize)
5248 {
5249 case IEMMODE_16BIT:
5250 IEM_MC_BEGIN(4, 2);
5251 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5252 IEM_MC_ARG(uint16_t, u16Src, 1);
5253 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5254 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5255 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5256
5257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5259 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5260 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5261 IEM_MC_FETCH_EFLAGS(EFlags);
5262 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5263 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5264
5265 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5266 IEM_MC_COMMIT_EFLAGS(EFlags);
5267 IEM_MC_ADVANCE_RIP();
5268 IEM_MC_END();
5269 return VINF_SUCCESS;
5270
5271 case IEMMODE_32BIT:
5272 IEM_MC_BEGIN(4, 2);
5273 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5274 IEM_MC_ARG(uint32_t, u32Src, 1);
5275 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5276 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5277 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5278
5279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5281 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5282 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5283 IEM_MC_FETCH_EFLAGS(EFlags);
5284 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5285 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5286
5287 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5288 IEM_MC_COMMIT_EFLAGS(EFlags);
5289 IEM_MC_ADVANCE_RIP();
5290 IEM_MC_END();
5291 return VINF_SUCCESS;
5292
5293 case IEMMODE_64BIT:
5294 IEM_MC_BEGIN(4, 2);
5295 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5296 IEM_MC_ARG(uint64_t, u64Src, 1);
5297 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5298 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5299 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5300
5301 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5303 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5304 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5305 IEM_MC_FETCH_EFLAGS(EFlags);
5306 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5307 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5308
5309 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5310 IEM_MC_COMMIT_EFLAGS(EFlags);
5311 IEM_MC_ADVANCE_RIP();
5312 IEM_MC_END();
5313 return VINF_SUCCESS;
5314
5315 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5316 }
5317 }
5318}
5319
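/*
 * For reference, a minimal C sketch of the double precision shift that the
 * g_iemAImpl_shld workers (invoked via the common worker above) compute.
 * Illustration only; the real workers are in assembly and also update
 * CF/OF/SF/ZF/PF:
 *
 * @code
 *    static uint32_t iemShldU32Sketch(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
 *    {
 *        cShift &= 31;               // the CPU masks the count to the operand width
 *        if (!cShift)
 *            return uDst;            // a count of zero leaves operand and flags alone
 *        return (uDst << cShift) | (uSrc >> (32 - cShift));
 *    }
 * @endcode
 *
 * SHRD mirrors this with the source bits entering from the top end.
 */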
5320
5321
5322/** Opcode 0x0f 0xa4. */
5323FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5324{
5325 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5326 IEMOP_HLP_MIN_386();
5327 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5328}
5329
5330
5331/** Opcode 0x0f 0xa5. */
5332FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5333{
5334 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5335 IEMOP_HLP_MIN_386();
5336 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5337}
5338
5339
5340/** Opcode 0x0f 0xa8. */
5341FNIEMOP_DEF(iemOp_push_gs)
5342{
5343 IEMOP_MNEMONIC(push_gs, "push gs");
5344 IEMOP_HLP_MIN_386();
5345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5346 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5347}
5348
5349
5350/** Opcode 0x0f 0xa9. */
5351FNIEMOP_DEF(iemOp_pop_gs)
5352{
5353 IEMOP_MNEMONIC(pop_gs, "pop gs");
5354 IEMOP_HLP_MIN_386();
5355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5356 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5357}
5358
5359
5360/** Opcode 0x0f 0xaa. */
5361FNIEMOP_STUB(iemOp_rsm);
5362//IEMOP_HLP_MIN_386();
5363
5364
5365/** Opcode 0x0f 0xab. */
5366FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5367{
5368 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5369 IEMOP_HLP_MIN_386();
5370 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5371}
5372
5373
5374/** Opcode 0x0f 0xac. */
5375FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5376{
5377 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5378 IEMOP_HLP_MIN_386();
5379 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5380}
5381
5382
5383/** Opcode 0x0f 0xad. */
5384FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5385{
5386 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5387 IEMOP_HLP_MIN_386();
5388 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5389}
5390
5391
5392/** Opcode 0x0f 0xae mem/0. */
5393FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5394{
5395 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5396 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5397 return IEMOP_RAISE_INVALID_OPCODE();
5398
5399 IEM_MC_BEGIN(3, 1);
5400 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5401 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5402 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5403 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5405 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5406 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5407 IEM_MC_END();
5408 return VINF_SUCCESS;
5409}
5410
5411
5412/** Opcode 0x0f 0xae mem/1. */
5413FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5414{
5415 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5416 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5417 return IEMOP_RAISE_INVALID_OPCODE();
5418
5419 IEM_MC_BEGIN(3, 1);
5420 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5421 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5422 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5423 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5425 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5426 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5427 IEM_MC_END();
5428 return VINF_SUCCESS;
5429}
5430
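/*
 * Both handlers above defer to iemCImpl_fxsave / iemCImpl_fxrstor, which
 * operate on the architectural 512-byte FXSAVE image (x87 state, MXCSR and
 * the XMM registers); the m512byte operand must be 16-byte aligned or the
 * instruction raises #GP. Rough shape of the image for orientation only,
 * see X86FXSTATE for the real layout:
 *
 * @code
 *    typedef struct FXSAVEAREASKETCH     // illustrative, not the real type
 *    {
 *        uint16_t FCW, FSW;              // x87 control and status words
 *        uint8_t  FTW, bRsvd;            // abridged tag word
 *        uint16_t FOP;
 *        // ... instruction and data pointer fields ...
 *        uint32_t MXCSR, MXCSR_MASK;
 *        uint8_t  aRegs[8][16];          // ST0-ST7 / MM0-MM7
 *        uint8_t  aXMM[16][16];          // XMM0-XMM15 (8 outside 64-bit mode)
 *        uint8_t  abRsvd[96];            // pads the image to 512 bytes
 *    } FXSAVEAREASKETCH;
 * @endcode
 */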
5431
5432/** Opcode 0x0f 0xae mem/2. */
5433FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);
5434
5435/** Opcode 0x0f 0xae mem/3. */
5436FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);
5437
5438/** Opcode 0x0f 0xae mem/4. */
5439FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);
5440
5441/** Opcode 0x0f 0xae mem/5. */
5442FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);
5443
5444/** Opcode 0x0f 0xae mem/6. */
5445FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
5446
5447/** Opcode 0x0f 0xae mem/7. */
5448FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5449
5450
5451/** Opcode 0x0f 0xae 11b/5. */
5452FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
5453{
5454 RT_NOREF_PV(bRm);
5455 IEMOP_MNEMONIC(lfence, "lfence");
5456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5457 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5458 return IEMOP_RAISE_INVALID_OPCODE();
5459
5460 IEM_MC_BEGIN(0, 0);
5461 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5462 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
5463 else
5464 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5465 IEM_MC_ADVANCE_RIP();
5466 IEM_MC_END();
5467 return VINF_SUCCESS;
5468}
5469
5470
5471/** Opcode 0x0f 0xae 11b/6. */
5472FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
5473{
5474 RT_NOREF_PV(bRm);
5475 IEMOP_MNEMONIC(mfence, "mfence");
5476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5477 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5478 return IEMOP_RAISE_INVALID_OPCODE();
5479
5480 IEM_MC_BEGIN(0, 0);
5481 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5482 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
5483 else
5484 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5485 IEM_MC_ADVANCE_RIP();
5486 IEM_MC_END();
5487 return VINF_SUCCESS;
5488}
5489
5490
5491/** Opcode 0x0f 0xae 11b/7. */
5492FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
5493{
5494 RT_NOREF_PV(bRm);
5495 IEMOP_MNEMONIC(sfence, "sfence");
5496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5497 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5498 return IEMOP_RAISE_INVALID_OPCODE();
5499
5500 IEM_MC_BEGIN(0, 0);
5501 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5502 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
5503 else
5504 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5505 IEM_MC_ADVANCE_RIP();
5506 IEM_MC_END();
5507 return VINF_SUCCESS;
5508}
5509
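/*
 * On hosts without SSE2 the three fence handlers above fall back to
 * iemAImpl_alt_mem_fence, which has to achieve a full memory barrier by
 * other means. A locked operation is the classic substitute; an
 * illustrative C-level equivalent (not the actual implementation, which
 * is in assembly):
 *
 * @code
 *    static void altMemFenceSketch(void)
 *    {
 *        uint32_t volatile u32Dummy = 0;
 *        ASMAtomicXchgU32(&u32Dummy, 1);   // LOCK XCHG is a full barrier on x86
 *    }
 * @endcode
 */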
5510
5511/** Opcode 0xf3 0x0f 0xae 11b/0. */
5512FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
5513
5514/** Opcode 0xf3 0x0f 0xae 11b/1. */
5515FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
5516
5517/** Opcode 0xf3 0x0f 0xae 11b/2. */
5518FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
5519
5520/** Opcode 0xf3 0x0f 0xae 11b/3. */
5521FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5522
5523
5524/** Opcode 0x0f 0xae. */
5525FNIEMOP_DEF(iemOp_Grp15)
5526{
5527 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
5528 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5529 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
5530 {
5531 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5532 {
5533 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
5534 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
5535 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
5536 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
5537 case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
5538 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
5539 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
5540 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
5541 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5542 }
5543 }
5544 else
5545 {
5546 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
5547 {
5548 case 0:
5549 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5550 {
5551 case 0: return IEMOP_RAISE_INVALID_OPCODE();
5552 case 1: return IEMOP_RAISE_INVALID_OPCODE();
5553 case 2: return IEMOP_RAISE_INVALID_OPCODE();
5554 case 3: return IEMOP_RAISE_INVALID_OPCODE();
5555 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5556 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
5557 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
5558 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
5559 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5560 }
5561 break;
5562
5563 case IEM_OP_PRF_REPZ:
5564 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5565 {
5566 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
5567 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
5568 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
5569 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
5570 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5571 case 5: return IEMOP_RAISE_INVALID_OPCODE();
5572 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5573 case 7: return IEMOP_RAISE_INVALID_OPCODE();
5574 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5575 }
5576 break;
5577
5578 default:
5579 return IEMOP_RAISE_INVALID_OPCODE();
5580 }
5581 }
5582}
5583
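/*
 * For reference, the ModR/M fields the dispatcher above switches on
 * (mask and shift constants from x86.h):
 *
 * @code
 *    // bRm = mod(7:6) reg(5:3) rm(2:0)
 *    uint8_t const uMod = (bRm & X86_MODRM_MOD_MASK) >> X86_MODRM_MOD_SHIFT;  // 3 = register form
 *    uint8_t const uReg = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK; // opcode extension /0../7
 * @endcode
 *
 * For group opcodes like 0x0f 0xae the reg field selects the instruction,
 * while the repz/opsize prefixes select between the encoding tables.
 */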
5584
5585/** Opcode 0x0f 0xaf. */
5586FNIEMOP_DEF(iemOp_imul_Gv_Ev)
5587{
5588 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
5589 IEMOP_HLP_MIN_386();
5590 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5591 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
5592}
5593
5594
5595/** Opcode 0x0f 0xb0. */
5596FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
5597{
5598 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
5599 IEMOP_HLP_MIN_486();
5600 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5601
5602 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5603 {
5604 IEMOP_HLP_DONE_DECODING();
5605 IEM_MC_BEGIN(4, 0);
5606 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5607 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5608 IEM_MC_ARG(uint8_t, u8Src, 2);
5609 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5610
5611 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5612 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5613 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
5614 IEM_MC_REF_EFLAGS(pEFlags);
5615 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5616 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5617 else
5618 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5619
5620 IEM_MC_ADVANCE_RIP();
5621 IEM_MC_END();
5622 }
5623 else
5624 {
5625 IEM_MC_BEGIN(4, 3);
5626 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5627 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5628 IEM_MC_ARG(uint8_t, u8Src, 2);
5629 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5630 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5631 IEM_MC_LOCAL(uint8_t, u8Al);
5632
5633 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5634 IEMOP_HLP_DONE_DECODING();
5635 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5636 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5637 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
5638 IEM_MC_FETCH_EFLAGS(EFlags);
5639 IEM_MC_REF_LOCAL(pu8Al, u8Al);
5640 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5641 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5642 else
5643 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5644
5645 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
5646 IEM_MC_COMMIT_EFLAGS(EFlags);
5647 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
5648 IEM_MC_ADVANCE_RIP();
5649 IEM_MC_END();
5650 }
5651 return VINF_SUCCESS;
5652}
5653
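/*
 * A minimal sketch of the compare-and-exchange the iemAImpl_cmpxchg_u*
 * workers implement (illustration only; the real workers are assembly,
 * set the remaining arithmetic flags like a CMP, and honour LOCK):
 *
 * @code
 *    static void iemCmpXchgU8Sketch(uint8_t *pu8Dst, uint8_t *pu8Al, uint8_t u8Src, uint32_t *pfEFlags)
 *    {
 *        if (*pu8Dst == *pu8Al)
 *        {
 *            *pfEFlags |= X86_EFL_ZF;    // equal: the source is stored into the destination
 *            *pu8Dst    = u8Src;
 *        }
 *        else
 *        {
 *            *pfEFlags &= ~X86_EFL_ZF;   // not equal: the destination is loaded into AL
 *            *pu8Al     = *pu8Dst;
 *        }
 *    }
 * @endcode
 */
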
5654/** Opcode 0x0f 0xb1. */
5655FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
5656{
5657 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
5658 IEMOP_HLP_MIN_486();
5659 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5660
5661 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5662 {
5663 IEMOP_HLP_DONE_DECODING();
5664 switch (pVCpu->iem.s.enmEffOpSize)
5665 {
5666 case IEMMODE_16BIT:
5667 IEM_MC_BEGIN(4, 0);
5668 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5669 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5670 IEM_MC_ARG(uint16_t, u16Src, 2);
5671 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5672
5673 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5674 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5675 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
5676 IEM_MC_REF_EFLAGS(pEFlags);
5677 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5678 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5679 else
5680 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5681
5682 IEM_MC_ADVANCE_RIP();
5683 IEM_MC_END();
5684 return VINF_SUCCESS;
5685
5686 case IEMMODE_32BIT:
5687 IEM_MC_BEGIN(4, 0);
5688 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5689 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5690 IEM_MC_ARG(uint32_t, u32Src, 2);
5691 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5692
5693 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5694 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5695 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
5696 IEM_MC_REF_EFLAGS(pEFlags);
5697 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5698 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5699 else
5700 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5701
5702 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
5703 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5704 IEM_MC_ADVANCE_RIP();
5705 IEM_MC_END();
5706 return VINF_SUCCESS;
5707
5708 case IEMMODE_64BIT:
5709 IEM_MC_BEGIN(4, 0);
5710 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5711 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5712#ifdef RT_ARCH_X86
5713 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5714#else
5715 IEM_MC_ARG(uint64_t, u64Src, 2);
5716#endif
5717 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5718
5719 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5720 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
5721 IEM_MC_REF_EFLAGS(pEFlags);
5722#ifdef RT_ARCH_X86
5723 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5724 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5725 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5726 else
5727 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5728#else
5729 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5730 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5731 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5732 else
5733 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5734#endif
5735
5736 IEM_MC_ADVANCE_RIP();
5737 IEM_MC_END();
5738 return VINF_SUCCESS;
5739
5740 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5741 }
5742 }
5743 else
5744 {
5745 switch (pVCpu->iem.s.enmEffOpSize)
5746 {
5747 case IEMMODE_16BIT:
5748 IEM_MC_BEGIN(4, 3);
5749 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5750 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5751 IEM_MC_ARG(uint16_t, u16Src, 2);
5752 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5753 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5754 IEM_MC_LOCAL(uint16_t, u16Ax);
5755
5756 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5757 IEMOP_HLP_DONE_DECODING();
5758 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5759 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5760 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
5761 IEM_MC_FETCH_EFLAGS(EFlags);
5762 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
5763 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5764 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5765 else
5766 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5767
5768 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5769 IEM_MC_COMMIT_EFLAGS(EFlags);
5770 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
5771 IEM_MC_ADVANCE_RIP();
5772 IEM_MC_END();
5773 return VINF_SUCCESS;
5774
5775 case IEMMODE_32BIT:
5776 IEM_MC_BEGIN(4, 3);
5777 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5778 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5779 IEM_MC_ARG(uint32_t, u32Src, 2);
5780 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5781 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5782 IEM_MC_LOCAL(uint32_t, u32Eax);
5783
5784 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5785 IEMOP_HLP_DONE_DECODING();
5786 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5787 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5788 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
5789 IEM_MC_FETCH_EFLAGS(EFlags);
5790 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
5791 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5792 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5793 else
5794 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5795
5796 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5797 IEM_MC_COMMIT_EFLAGS(EFlags);
5798 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
5799 IEM_MC_ADVANCE_RIP();
5800 IEM_MC_END();
5801 return VINF_SUCCESS;
5802
5803 case IEMMODE_64BIT:
5804 IEM_MC_BEGIN(4, 3);
5805 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5806 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5807#ifdef RT_ARCH_X86
5808 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5809#else
5810 IEM_MC_ARG(uint64_t, u64Src, 2);
5811#endif
5812 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5813 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5814 IEM_MC_LOCAL(uint64_t, u64Rax);
5815
5816 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5817 IEMOP_HLP_DONE_DECODING();
5818 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5819 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
5820 IEM_MC_FETCH_EFLAGS(EFlags);
5821 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
5822#ifdef RT_ARCH_X86
5823 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5824 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5825 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5826 else
5827 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5828#else
5829 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5830 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5831 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5832 else
5833 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5834#endif
5835
5836 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5837 IEM_MC_COMMIT_EFLAGS(EFlags);
5838 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
5839 IEM_MC_ADVANCE_RIP();
5840 IEM_MC_END();
5841 return VINF_SUCCESS;
5842
5843 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5844 }
5845 }
5846}
5847
5848
5849FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
5850{
5851 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
5852 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
5853
5854 switch (pVCpu->iem.s.enmEffOpSize)
5855 {
5856 case IEMMODE_16BIT:
5857 IEM_MC_BEGIN(5, 1);
5858 IEM_MC_ARG(uint16_t, uSel, 0);
5859 IEM_MC_ARG(uint16_t, offSeg, 1);
5860 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5861 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5862 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5863 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5864 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5866 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5867 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
5868 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5869 IEM_MC_END();
5870 return VINF_SUCCESS;
5871
5872 case IEMMODE_32BIT:
5873 IEM_MC_BEGIN(5, 1);
5874 IEM_MC_ARG(uint16_t, uSel, 0);
5875 IEM_MC_ARG(uint32_t, offSeg, 1);
5876 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5877 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5878 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5879 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5880 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5882 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5883 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
5884 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5885 IEM_MC_END();
5886 return VINF_SUCCESS;
5887
5888 case IEMMODE_64BIT:
5889 IEM_MC_BEGIN(5, 1);
5890 IEM_MC_ARG(uint16_t, uSel, 0);
5891 IEM_MC_ARG(uint64_t, offSeg, 1);
5892 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5893 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5894 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5895 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5896 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5898 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev. 3.15 of the AMD manual claims it only loads a 32-bit greg. */
5899 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5900 else
5901 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5902 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
5903 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5904 IEM_MC_END();
5905 return VINF_SUCCESS;
5906
5907 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5908 }
5909}
5910
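/*
 * The common worker above reads a far pointer from memory: the offset
 * comes first, followed by the 16-bit selector at displacement 2/4/8
 * depending on the operand size. For the m16:32 case the layout is
 * (illustrative struct, assuming a little-endian, packed view):
 *
 * @code
 *    #pragma pack(1)
 *    typedef struct FARPTR32SKETCH
 *    {
 *        uint32_t off;   // fetched into offSeg
 *        uint16_t sel;   // fetched into uSel at disp 4
 *    } FARPTR32SKETCH;
 *    #pragma pack()
 * @endcode
 */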
5911
5912/** Opcode 0x0f 0xb2. */
5913FNIEMOP_DEF(iemOp_lss_Gv_Mp)
5914{
5915 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
5916 IEMOP_HLP_MIN_386();
5917 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5918 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5919 return IEMOP_RAISE_INVALID_OPCODE();
5920 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
5921}
5922
5923
5924/** Opcode 0x0f 0xb3. */
5925FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5926{
5927 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
5928 IEMOP_HLP_MIN_386();
5929 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5930}
5931
5932
5933/** Opcode 0x0f 0xb4. */
5934FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
5935{
5936 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
5937 IEMOP_HLP_MIN_386();
5938 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5939 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5940 return IEMOP_RAISE_INVALID_OPCODE();
5941 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
5942}
5943
5944
5945/** Opcode 0x0f 0xb5. */
5946FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
5947{
5948 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
5949 IEMOP_HLP_MIN_386();
5950 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5951 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5952 return IEMOP_RAISE_INVALID_OPCODE();
5953 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
5954}
5955
5956
5957/** Opcode 0x0f 0xb6. */
5958FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
5959{
5960 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
5961 IEMOP_HLP_MIN_386();
5962
5963 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5964
5965 /*
5966 * If rm is denoting a register, no more instruction bytes.
5967 */
5968 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5969 {
5970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5971 switch (pVCpu->iem.s.enmEffOpSize)
5972 {
5973 case IEMMODE_16BIT:
5974 IEM_MC_BEGIN(0, 1);
5975 IEM_MC_LOCAL(uint16_t, u16Value);
5976 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5977 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
5978 IEM_MC_ADVANCE_RIP();
5979 IEM_MC_END();
5980 return VINF_SUCCESS;
5981
5982 case IEMMODE_32BIT:
5983 IEM_MC_BEGIN(0, 1);
5984 IEM_MC_LOCAL(uint32_t, u32Value);
5985 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5986 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
5987 IEM_MC_ADVANCE_RIP();
5988 IEM_MC_END();
5989 return VINF_SUCCESS;
5990
5991 case IEMMODE_64BIT:
5992 IEM_MC_BEGIN(0, 1);
5993 IEM_MC_LOCAL(uint64_t, u64Value);
5994 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5995 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
5996 IEM_MC_ADVANCE_RIP();
5997 IEM_MC_END();
5998 return VINF_SUCCESS;
5999
6000 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6001 }
6002 }
6003 else
6004 {
6005 /*
6006 * We're loading a register from memory.
6007 */
6008 switch (pVCpu->iem.s.enmEffOpSize)
6009 {
6010 case IEMMODE_16BIT:
6011 IEM_MC_BEGIN(0, 2);
6012 IEM_MC_LOCAL(uint16_t, u16Value);
6013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6016 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6017 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6018 IEM_MC_ADVANCE_RIP();
6019 IEM_MC_END();
6020 return VINF_SUCCESS;
6021
6022 case IEMMODE_32BIT:
6023 IEM_MC_BEGIN(0, 2);
6024 IEM_MC_LOCAL(uint32_t, u32Value);
6025 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6026 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6028 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6029 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6030 IEM_MC_ADVANCE_RIP();
6031 IEM_MC_END();
6032 return VINF_SUCCESS;
6033
6034 case IEMMODE_64BIT:
6035 IEM_MC_BEGIN(0, 2);
6036 IEM_MC_LOCAL(uint64_t, u64Value);
6037 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6038 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6040 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6041 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6042 IEM_MC_ADVANCE_RIP();
6043 IEM_MC_END();
6044 return VINF_SUCCESS;
6045
6046 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6047 }
6048 }
6049}
6050
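/*
 * The IEM_MC_FETCH_*_ZX_* / _SX_* operations used by the MOVZX/MOVSX
 * handlers here are plain C widenings; for example (values chosen for
 * illustration):
 *
 * @code
 *    uint8_t  const u8    = 0xfe;
 *    uint64_t const u64Zx = u8;                            // MOVZX: 0x00000000000000fe
 *    uint64_t const u64Sx = (uint64_t)(int64_t)(int8_t)u8; // MOVSX: 0xfffffffffffffffe
 * @endcode
 */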
6051
6052/** Opcode 0x0f 0xb7. */
6053FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6054{
6055 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6056 IEMOP_HLP_MIN_386();
6057
6058 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6059
6060 /** @todo Not entirely sure how the operand size prefix is handled here,
6061 * assuming that it will be ignored. Would be nice to have a few
6062 * tests for this. */
6063 /*
6064 * If rm is denoting a register, no more instruction bytes.
6065 */
6066 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6067 {
6068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6069 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6070 {
6071 IEM_MC_BEGIN(0, 1);
6072 IEM_MC_LOCAL(uint32_t, u32Value);
6073 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6074 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6075 IEM_MC_ADVANCE_RIP();
6076 IEM_MC_END();
6077 }
6078 else
6079 {
6080 IEM_MC_BEGIN(0, 1);
6081 IEM_MC_LOCAL(uint64_t, u64Value);
6082 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6083 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6084 IEM_MC_ADVANCE_RIP();
6085 IEM_MC_END();
6086 }
6087 }
6088 else
6089 {
6090 /*
6091 * We're loading a register from memory.
6092 */
6093 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6094 {
6095 IEM_MC_BEGIN(0, 2);
6096 IEM_MC_LOCAL(uint32_t, u32Value);
6097 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6098 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6100 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6101 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6102 IEM_MC_ADVANCE_RIP();
6103 IEM_MC_END();
6104 }
6105 else
6106 {
6107 IEM_MC_BEGIN(0, 2);
6108 IEM_MC_LOCAL(uint64_t, u64Value);
6109 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6110 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6112 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6113 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6114 IEM_MC_ADVANCE_RIP();
6115 IEM_MC_END();
6116 }
6117 }
6118 return VINF_SUCCESS;
6119}
6120
6121
6122/** Opcode 0x0f 0xb8. */
6123FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
6124
6125
6126/** Opcode 0x0f 0xb9. */
6127FNIEMOP_DEF(iemOp_Grp10)
6128{
6129 Log(("iemOp_Grp10 -> #UD\n"));
6130 return IEMOP_RAISE_INVALID_OPCODE();
6131}
6132
6133
6134/** Opcode 0x0f 0xba. */
6135FNIEMOP_DEF(iemOp_Grp8)
6136{
6137 IEMOP_HLP_MIN_386();
6138 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6139 PCIEMOPBINSIZES pImpl;
6140 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6141 {
6142 case 0: case 1: case 2: case 3:
6143 return IEMOP_RAISE_INVALID_OPCODE();
6144 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6145 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6146 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6147 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6148 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6149 }
6150 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6151
6152 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6153 {
6154 /* register destination. */
6155 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6157
6158 switch (pVCpu->iem.s.enmEffOpSize)
6159 {
6160 case IEMMODE_16BIT:
6161 IEM_MC_BEGIN(3, 0);
6162 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6163 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6164 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6165
6166 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6167 IEM_MC_REF_EFLAGS(pEFlags);
6168 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6169
6170 IEM_MC_ADVANCE_RIP();
6171 IEM_MC_END();
6172 return VINF_SUCCESS;
6173
6174 case IEMMODE_32BIT:
6175 IEM_MC_BEGIN(3, 0);
6176 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6177 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6178 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6179
6180 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6181 IEM_MC_REF_EFLAGS(pEFlags);
6182 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6183
6184 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6185 IEM_MC_ADVANCE_RIP();
6186 IEM_MC_END();
6187 return VINF_SUCCESS;
6188
6189 case IEMMODE_64BIT:
6190 IEM_MC_BEGIN(3, 0);
6191 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6192 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6193 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6194
6195 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6196 IEM_MC_REF_EFLAGS(pEFlags);
6197 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6198
6199 IEM_MC_ADVANCE_RIP();
6200 IEM_MC_END();
6201 return VINF_SUCCESS;
6202
6203 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6204 }
6205 }
6206 else
6207 {
6208 /* memory destination. */
6209
6210 uint32_t fAccess;
6211 if (pImpl->pfnLockedU16)
6212 fAccess = IEM_ACCESS_DATA_RW;
6213 else /* BT */
6214 fAccess = IEM_ACCESS_DATA_R;
6215
6216 /** @todo test negative bit offsets! */
6217 switch (pVCpu->iem.s.enmEffOpSize)
6218 {
6219 case IEMMODE_16BIT:
6220 IEM_MC_BEGIN(3, 1);
6221 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6222 IEM_MC_ARG(uint16_t, u16Src, 1);
6223 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6225
6226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6227 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6228 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6229 if (pImpl->pfnLockedU16)
6230 IEMOP_HLP_DONE_DECODING();
6231 else
6232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6233 IEM_MC_FETCH_EFLAGS(EFlags);
6234 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6235 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6236 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6237 else
6238 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6239 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6240
6241 IEM_MC_COMMIT_EFLAGS(EFlags);
6242 IEM_MC_ADVANCE_RIP();
6243 IEM_MC_END();
6244 return VINF_SUCCESS;
6245
6246 case IEMMODE_32BIT:
6247 IEM_MC_BEGIN(3, 1);
6248 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6249 IEM_MC_ARG(uint32_t, u32Src, 1);
6250 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6251 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6252
6253 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6254 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6255 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6256 if (pImpl->pfnLockedU16)
6257 IEMOP_HLP_DONE_DECODING();
6258 else
6259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6260 IEM_MC_FETCH_EFLAGS(EFlags);
6261 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6262 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6263 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6264 else
6265 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6266 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6267
6268 IEM_MC_COMMIT_EFLAGS(EFlags);
6269 IEM_MC_ADVANCE_RIP();
6270 IEM_MC_END();
6271 return VINF_SUCCESS;
6272
6273 case IEMMODE_64BIT:
6274 IEM_MC_BEGIN(3, 1);
6275 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6276 IEM_MC_ARG(uint64_t, u64Src, 1);
6277 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6278 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6279
6280 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6281 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6282 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6283 if (pImpl->pfnLockedU16)
6284 IEMOP_HLP_DONE_DECODING();
6285 else
6286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6287 IEM_MC_FETCH_EFLAGS(EFlags);
6288 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6289 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6290 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6291 else
6292 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6293 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6294
6295 IEM_MC_COMMIT_EFLAGS(EFlags);
6296 IEM_MC_ADVANCE_RIP();
6297 IEM_MC_END();
6298 return VINF_SUCCESS;
6299
6300 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6301 }
6302 }
6303
6304}
6305
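/*
 * Note that for these immediate forms the bit offset is masked to the
 * operand width (u8Bit & 0x0f/0x1f/0x3f above) in both the register and
 * the memory encodings, so - unlike BT Ev,Gv with a register offset -
 * the access can never stray outside the addressed operand:
 *
 * @code
 *    unsigned const iBit  = u8Bit & 31;       // e.g. 32-bit operand: bit 37 aliases bit 5
 *    uint32_t const fMask = RT_BIT_32(iBit);
 * @endcode
 */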
6306
6307/** Opcode 0x0f 0xbb. */
6308FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6309{
6310 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
6311 IEMOP_HLP_MIN_386();
6312 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6313}
6314
6315
6316/** Opcode 0x0f 0xbc. */
6317FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6318{
6319 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
6320 IEMOP_HLP_MIN_386();
6321 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6322 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6323}
6324
6325
6326/** Opcode 0x0f 0xbd. */
6327FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6328{
6329 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
6330 IEMOP_HLP_MIN_386();
6331 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6332 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6333}
6334
6335
6336/** Opcode 0x0f 0xbe. */
6337FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
6338{
6339 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
6340 IEMOP_HLP_MIN_386();
6341
6342 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6343
6344 /*
6345 * If rm is denoting a register, no more instruction bytes.
6346 */
6347 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6348 {
6349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6350 switch (pVCpu->iem.s.enmEffOpSize)
6351 {
6352 case IEMMODE_16BIT:
6353 IEM_MC_BEGIN(0, 1);
6354 IEM_MC_LOCAL(uint16_t, u16Value);
6355 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6356 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6357 IEM_MC_ADVANCE_RIP();
6358 IEM_MC_END();
6359 return VINF_SUCCESS;
6360
6361 case IEMMODE_32BIT:
6362 IEM_MC_BEGIN(0, 1);
6363 IEM_MC_LOCAL(uint32_t, u32Value);
6364 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6365 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6366 IEM_MC_ADVANCE_RIP();
6367 IEM_MC_END();
6368 return VINF_SUCCESS;
6369
6370 case IEMMODE_64BIT:
6371 IEM_MC_BEGIN(0, 1);
6372 IEM_MC_LOCAL(uint64_t, u64Value);
6373 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6374 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6375 IEM_MC_ADVANCE_RIP();
6376 IEM_MC_END();
6377 return VINF_SUCCESS;
6378
6379 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6380 }
6381 }
6382 else
6383 {
6384 /*
6385 * We're loading a register from memory.
6386 */
6387 switch (pVCpu->iem.s.enmEffOpSize)
6388 {
6389 case IEMMODE_16BIT:
6390 IEM_MC_BEGIN(0, 2);
6391 IEM_MC_LOCAL(uint16_t, u16Value);
6392 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6393 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6395 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6396 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6397 IEM_MC_ADVANCE_RIP();
6398 IEM_MC_END();
6399 return VINF_SUCCESS;
6400
6401 case IEMMODE_32BIT:
6402 IEM_MC_BEGIN(0, 2);
6403 IEM_MC_LOCAL(uint32_t, u32Value);
6404 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6405 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6407 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6408 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6409 IEM_MC_ADVANCE_RIP();
6410 IEM_MC_END();
6411 return VINF_SUCCESS;
6412
6413 case IEMMODE_64BIT:
6414 IEM_MC_BEGIN(0, 2);
6415 IEM_MC_LOCAL(uint64_t, u64Value);
6416 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6419 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6420 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6421 IEM_MC_ADVANCE_RIP();
6422 IEM_MC_END();
6423 return VINF_SUCCESS;
6424
6425 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6426 }
6427 }
6428}
6429
6430
6431/** Opcode 0x0f 0xbf. */
6432FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
6433{
6434 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
6435 IEMOP_HLP_MIN_386();
6436
6437 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6438
6439 /** @todo Not entirely sure how the operand size prefix is handled here,
6440 * assuming that it will be ignored. Would be nice to have a few
6441 * tests for this. */
6442 /*
6443 * If rm is denoting a register, no more instruction bytes.
6444 */
6445 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6446 {
6447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6448 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6449 {
6450 IEM_MC_BEGIN(0, 1);
6451 IEM_MC_LOCAL(uint32_t, u32Value);
6452 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6453 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6454 IEM_MC_ADVANCE_RIP();
6455 IEM_MC_END();
6456 }
6457 else
6458 {
6459 IEM_MC_BEGIN(0, 1);
6460 IEM_MC_LOCAL(uint64_t, u64Value);
6461 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6462 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6463 IEM_MC_ADVANCE_RIP();
6464 IEM_MC_END();
6465 }
6466 }
6467 else
6468 {
6469 /*
6470 * We're loading a register from memory.
6471 */
6472 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6473 {
6474 IEM_MC_BEGIN(0, 2);
6475 IEM_MC_LOCAL(uint32_t, u32Value);
6476 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6477 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6479 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6480 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6481 IEM_MC_ADVANCE_RIP();
6482 IEM_MC_END();
6483 }
6484 else
6485 {
6486 IEM_MC_BEGIN(0, 2);
6487 IEM_MC_LOCAL(uint64_t, u64Value);
6488 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6489 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6491 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6492 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6493 IEM_MC_ADVANCE_RIP();
6494 IEM_MC_END();
6495 }
6496 }
6497 return VINF_SUCCESS;
6498}
6499
6500
6501/** Opcode 0x0f 0xc0. */
6502FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
6503{
6504 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6505 IEMOP_HLP_MIN_486();
6506 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
6507
6508 /*
6509 * If rm is denoting a register, no more instruction bytes.
6510 */
6511 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6512 {
6513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6514
6515 IEM_MC_BEGIN(3, 0);
6516 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6517 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6518 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6519
6520 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6521 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6522 IEM_MC_REF_EFLAGS(pEFlags);
6523 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6524
6525 IEM_MC_ADVANCE_RIP();
6526 IEM_MC_END();
6527 }
6528 else
6529 {
6530 /*
6531 * We're accessing memory.
6532 */
6533 IEM_MC_BEGIN(3, 3);
6534 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6535 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6536 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6537 IEM_MC_LOCAL(uint8_t, u8RegCopy);
6538 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6539
6540 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6541 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6542 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6543 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
6544 IEM_MC_FETCH_EFLAGS(EFlags);
6545 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6546 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6547 else
6548 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
6549
6550 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6551 IEM_MC_COMMIT_EFLAGS(EFlags);
6552 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
6553 IEM_MC_ADVANCE_RIP();
6554 IEM_MC_END();
6555 return VINF_SUCCESS;
6556 }
6557 return VINF_SUCCESS;
6558}
6559
6560
6561/** Opcode 0x0f 0xc1. */
6562FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
6563{
6564 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
6565 IEMOP_HLP_MIN_486();
6566 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6567
6568 /*
6569 * If rm is denoting a register, no more instruction bytes.
6570 */
6571 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6572 {
6573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6574
6575 switch (pVCpu->iem.s.enmEffOpSize)
6576 {
6577 case IEMMODE_16BIT:
6578 IEM_MC_BEGIN(3, 0);
6579 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6580 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6581 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6582
6583 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6584 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6585 IEM_MC_REF_EFLAGS(pEFlags);
6586 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6587
6588 IEM_MC_ADVANCE_RIP();
6589 IEM_MC_END();
6590 return VINF_SUCCESS;
6591
6592 case IEMMODE_32BIT:
6593 IEM_MC_BEGIN(3, 0);
6594 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6595 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6596 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6597
6598 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6599 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6600 IEM_MC_REF_EFLAGS(pEFlags);
6601 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6602
6603 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6604 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
6605 IEM_MC_ADVANCE_RIP();
6606 IEM_MC_END();
6607 return VINF_SUCCESS;
6608
6609 case IEMMODE_64BIT:
6610 IEM_MC_BEGIN(3, 0);
6611 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6612 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6613 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6614
6615 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6616 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6617 IEM_MC_REF_EFLAGS(pEFlags);
6618 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6619
6620 IEM_MC_ADVANCE_RIP();
6621 IEM_MC_END();
6622 return VINF_SUCCESS;
6623
6624 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6625 }
6626 }
6627 else
6628 {
6629 /*
6630 * We're accessing memory.
6631 */
6632 switch (pVCpu->iem.s.enmEffOpSize)
6633 {
6634 case IEMMODE_16BIT:
6635 IEM_MC_BEGIN(3, 3);
6636 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6637 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6638 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6639 IEM_MC_LOCAL(uint16_t, u16RegCopy);
6640 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6641
6642 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6643 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6644 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6645 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
6646 IEM_MC_FETCH_EFLAGS(EFlags);
6647 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6648 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6649 else
6650 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
6651
6652 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6653 IEM_MC_COMMIT_EFLAGS(EFlags);
6654 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
6655 IEM_MC_ADVANCE_RIP();
6656 IEM_MC_END();
6657 return VINF_SUCCESS;
6658
6659 case IEMMODE_32BIT:
6660 IEM_MC_BEGIN(3, 3);
6661 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6662 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6663 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6664 IEM_MC_LOCAL(uint32_t, u32RegCopy);
6665 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6666
6667 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6668 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6669 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6670 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
6671 IEM_MC_FETCH_EFLAGS(EFlags);
6672 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6673 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6674 else
6675 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
6676
6677 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6678 IEM_MC_COMMIT_EFLAGS(EFlags);
6679 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
6680 IEM_MC_ADVANCE_RIP();
6681 IEM_MC_END();
6682 return VINF_SUCCESS;
6683
6684 case IEMMODE_64BIT:
6685 IEM_MC_BEGIN(3, 3);
6686 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6687 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6688 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6689 IEM_MC_LOCAL(uint64_t, u64RegCopy);
6690 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6691
6692 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6693 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6694 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6695 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
6696 IEM_MC_FETCH_EFLAGS(EFlags);
6697 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6698 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6699 else
6700 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
6701
6702 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6703 IEM_MC_COMMIT_EFLAGS(EFlags);
6704 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
6705 IEM_MC_ADVANCE_RIP();
6706 IEM_MC_END();
6707 return VINF_SUCCESS;
6708
6709 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6710 }
6711 }
6712}
6713
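/*
 * For reference, what the iemAImpl_xadd_u* workers compute (minimal
 * sketch; the real workers are assembly and set the arithmetic flags
 * exactly like an ADD):
 *
 * @code
 *    static void iemXAddU32Sketch(uint32_t *pu32Dst, uint32_t *pu32Reg)
 *    {
 *        uint32_t const uTmp = *pu32Dst;
 *        *pu32Dst += *pu32Reg;   // the destination receives the sum
 *        *pu32Reg  = uTmp;       // the register operand receives the old destination
 *    }
 * @endcode
 */
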
6714/** Opcode 0x0f 0xc2. */
6715FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);
6716
6717
6718/** Opcode 0x0f 0xc3. */
6719FNIEMOP_DEF(iemOp_movnti_My_Gy)
6720{
6721 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
6722
6723 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6724
6725 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
6726 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
6727 {
6728 switch (pVCpu->iem.s.enmEffOpSize)
6729 {
6730 case IEMMODE_32BIT:
6731 IEM_MC_BEGIN(0, 2);
6732 IEM_MC_LOCAL(uint32_t, u32Value);
6733 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6734
6735 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6737 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6738 return IEMOP_RAISE_INVALID_OPCODE();
6739
6740 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6741 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
6742 IEM_MC_ADVANCE_RIP();
6743 IEM_MC_END();
6744 break;
6745
6746 case IEMMODE_64BIT:
6747 IEM_MC_BEGIN(0, 2);
6748 IEM_MC_LOCAL(uint64_t, u64Value);
6749 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6750
6751 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6753 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6754 return IEMOP_RAISE_INVALID_OPCODE();
6755
6756 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6757 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
6758 IEM_MC_ADVANCE_RIP();
6759 IEM_MC_END();
6760 break;
6761
6762 case IEMMODE_16BIT:
6763 /** @todo check this form. */
6764 return IEMOP_RAISE_INVALID_OPCODE();
6765 }
6766 }
6767 else
6768 return IEMOP_RAISE_INVALID_OPCODE();
6769 return VINF_SUCCESS;
6770}
6771
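/*
 * Note: the non-temporal hint of MOVNTI is irrelevant to the interpreter,
 * so the handler above implements it as a plain store; only the SSE2
 * feature check and the #UD for the register form distinguish it from an
 * ordinary MOV.
 */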
6772
6773/** Opcode 0x0f 0xc4. */
6774FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);
6775
6776/** Opcode 0x0f 0xc5. */
6777FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);
6778
6779/** Opcode 0x0f 0xc6. */
6780FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
6781
6782
6783/** Opcode 0x0f 0xc7 !11/1. */
6784FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
6785{
6786 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
6787
6788 IEM_MC_BEGIN(4, 3);
6789 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
6790 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
6791 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
6792 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
6793 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
6794 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
6795 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6796
6797 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6798 IEMOP_HLP_DONE_DECODING();
6799 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6800
6801 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
6802 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
6803 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
6804
6805 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
6806 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
6807 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
6808
6809 IEM_MC_FETCH_EFLAGS(EFlags);
6810 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6811 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
6812 else
6813 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
6814
6815 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
6816 IEM_MC_COMMIT_EFLAGS(EFlags);
6817 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
6818 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
6819 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
6820 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
6821 IEM_MC_ENDIF();
6822 IEM_MC_ADVANCE_RIP();
6823
6824 IEM_MC_END();
6825 return VINF_SUCCESS;
6826}
6827
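/*
 * Minimal sketch of the comparison performed by iemAImpl_cmpxchg8b
 * (illustration only; the real worker is assembly and, for the locked
 * variant, performs the whole operation atomically). Only ZF is defined
 * to change:
 *
 * @code
 *    static void iemCmpXchg8bSketch(uint64_t *pu64Dst, PRTUINT64U pu64EaxEdx, PRTUINT64U pu64EbxEcx,
 *                                   uint32_t *pfEFlags)
 *    {
 *        if (*pu64Dst == pu64EaxEdx->u)
 *        {
 *            *pu64Dst   = pu64EbxEcx->u;   // equal: store ECX:EBX into the memory operand
 *            *pfEFlags |= X86_EFL_ZF;
 *        }
 *        else
 *        {
 *            pu64EaxEdx->u = *pu64Dst;     // not equal: load the memory operand into EDX:EAX
 *            *pfEFlags    &= ~X86_EFL_ZF;
 *        }
 *    }
 * @endcode
 */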
6828
6829/** Opcode REX.W 0x0f 0xc7 !11/1. */
6830FNIEMOP_UD_STUB_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm);
6831
6832/** Opcode 0x0f 0xc7 11/6. */
6833FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
6834
6835/** Opcode 0x0f 0xc7 !11/6. */
6836FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
6837
6838/** Opcode 0x66 0x0f 0xc7 !11/6. */
6839FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
6840
6841/** Opcode 0xf3 0x0f 0xc7 !11/6. */
6842FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
6843
6844/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
6845FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6846
6847
6848/** Opcode 0x0f 0xc7. */
6849FNIEMOP_DEF(iemOp_Grp9)
6850{
6851 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6852 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6853 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6854 {
6855 case 0: case 2: case 3: case 4: case 5:
6856 return IEMOP_RAISE_INVALID_OPCODE();
6857 case 1:
6858 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6859 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6860 || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6861 return IEMOP_RAISE_INVALID_OPCODE();
6862 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) /* REX.W is a prefix flag; bRm is the ModR/M byte, so testing bRm here could never select cmpxchg16b. */
6863 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6864 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6865 case 6:
6866 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6867 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6868 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6869 {
6870 case 0:
6871 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6872 case IEM_OP_PRF_SIZE_OP:
6873 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6874 case IEM_OP_PRF_REPZ:
6875 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6876 default:
6877 return IEMOP_RAISE_INVALID_OPCODE();
6878 }
6879 case 7:
6880 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6881 {
6882 case 0:
6883 case IEM_OP_PRF_REPZ:
6884 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6885 default:
6886 return IEMOP_RAISE_INVALID_OPCODE();
6887 }
6888 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6889 }
6890}
6891
6892
6893/**
6894 * Common 'bswap register' helper.
6895 */
6896FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
6897{
6898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6899 switch (pVCpu->iem.s.enmEffOpSize)
6900 {
6901 case IEMMODE_16BIT:
6902 IEM_MC_BEGIN(1, 0);
6903 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6904 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
6905 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
6906 IEM_MC_ADVANCE_RIP();
6907 IEM_MC_END();
6908 return VINF_SUCCESS;
6909
6910 case IEMMODE_32BIT:
6911 IEM_MC_BEGIN(1, 0);
6912 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6913 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
6914 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6915 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
6916 IEM_MC_ADVANCE_RIP();
6917 IEM_MC_END();
6918 return VINF_SUCCESS;
6919
6920 case IEMMODE_64BIT:
6921 IEM_MC_BEGIN(1, 0);
6922 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6923 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
6924 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
6925 IEM_MC_ADVANCE_RIP();
6926 IEM_MC_END();
6927 return VINF_SUCCESS;
6928
6929 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6930 }
6931}
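
/* Example: bswap eax with EAX=0x12345678 yields EAX=0x78563412. The 16-bit
   operand-size form is undefined according to the Intel docs (real CPUs
   have been observed to zero the word), which is why the IEMMODE_16BIT case
   above hands a 32-bit register reference to the bswap_u16 worker instead
   of a plain 16-bit one. */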
6932
6933
6934/** Opcode 0x0f 0xc8. */
6935FNIEMOP_DEF(iemOp_bswap_rAX_r8)
6936{
6937 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
6938 /* Note! The Intel manuals state that R8-R15 can be accessed by using a
6939 REX.X prefix; it appears, however, that REX.B is the correct prefix.
6940 For a parallel case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
6941 IEMOP_HLP_MIN_486();
6942 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
6943}
6944
6945
6946/** Opcode 0x0f 0xc9. */
6947FNIEMOP_DEF(iemOp_bswap_rCX_r9)
6948{
6949 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
6950 IEMOP_HLP_MIN_486();
6951 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
6952}
6953
6954
6955/** Opcode 0x0f 0xca. */
6956FNIEMOP_DEF(iemOp_bswap_rDX_r10)
6957{
6958 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
6959 IEMOP_HLP_MIN_486();
6960 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
6961}
6962
6963
6964/** Opcode 0x0f 0xcb. */
6965FNIEMOP_DEF(iemOp_bswap_rBX_r11)
6966{
6967 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
6968 IEMOP_HLP_MIN_486();
6969 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
6970}
6971
6972
6973/** Opcode 0x0f 0xcc. */
6974FNIEMOP_DEF(iemOp_bswap_rSP_r12)
6975{
6976 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
6977 IEMOP_HLP_MIN_486();
6978 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
6979}
6980
6981
6982/** Opcode 0x0f 0xcd. */
6983FNIEMOP_DEF(iemOp_bswap_rBP_r13)
6984{
6985 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
6986 IEMOP_HLP_MIN_486();
6987 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
6988}
6989
6990
6991/** Opcode 0x0f 0xce. */
6992FNIEMOP_DEF(iemOp_bswap_rSI_r14)
6993{
6994 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
6995 IEMOP_HLP_MIN_486();
6996 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
6997}
6998
6999
7000/** Opcode 0x0f 0xcf. */
7001FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7002{
7003 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7004 IEMOP_HLP_MIN_486();
7005 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7006}
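
/* Example: in 64-bit mode "41 0f c8" is bswap r8d (REX.B extending the
   register selected by the low opcode bits, per the note at 0xc8 above),
   while plain "0f c8" remains bswap eax. */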
7007
7008
7009
7010/** Opcode 0x0f 0xd0. */
7011FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
7012/** Opcode 0x0f 0xd1. */
7013FNIEMOP_STUB(iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq);
7014/** Opcode 0x0f 0xd2. */
7015FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
7016/** Opcode 0x0f 0xd3. */
7017FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
7018/** Opcode 0x0f 0xd4. */
7019FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
7020/** Opcode 0x0f 0xd5. */
7021FNIEMOP_STUB(iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq);
7022/** Opcode 0x0f 0xd6. */
7023FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq); /** @todo Win10 w/o np may need this: 66 0f d6 0a */
7024
7025
7026/** Opcode 0x0f 0xd7. */
7027FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
7028{
7029 /* Docs say register only. */
7030 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7031 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7032 return IEMOP_RAISE_INVALID_OPCODE();
7033
7034 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
7035 /** @todo testcase: Check that the instruction implicitly clears the high
7036 * bits in 64-bit mode. REX.W first becomes necessary when VLMAX > 256
7037 * and opcode modifications are made to work with the whole width (not
7038 * just 128). */
7039 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7040 {
7041 case IEM_OP_PRF_SIZE_OP: /* SSE */
7042 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7043 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7044 IEM_MC_BEGIN(2, 0);
7045 IEM_MC_ARG(uint64_t *, pDst, 0);
7046 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7047 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7048 IEM_MC_PREPARE_SSE_USAGE();
7049 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7050 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7051 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7052 IEM_MC_ADVANCE_RIP();
7053 IEM_MC_END();
7054 return VINF_SUCCESS;
7055
7056 case 0: /* MMX */
7057 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7058 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7059 IEM_MC_BEGIN(2, 0);
7060 IEM_MC_ARG(uint64_t *, pDst, 0);
7061 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7062 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7063 IEM_MC_PREPARE_FPU_USAGE();
7064 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7065 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7066 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7067 IEM_MC_ADVANCE_RIP();
7068 IEM_MC_END();
7069 return VINF_SUCCESS;
7070
7071 default:
7072 return IEMOP_RAISE_INVALID_OPCODE();
7073 }
7074}
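
/* Example: pmovmskb gathers the most significant bit of every source byte
   into the low bits of the destination GPR, so an all-0xFF XMM source
   yields 0xffff and an all-0xFF MMX source yields 0xff; architecturally
   the remaining destination bits read as zero (the note above takes a
   shortcut for the high dword). */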
7075
7076
7077/** Opcode 0x0f 0xd8. */
7078FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
7079/** Opcode 0x0f 0xd9. */
7080FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
7081/** Opcode 0x0f 0xda. */
7082FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
7083/** Opcode 0x0f 0xdb. */
7084FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
7085/** Opcode 0x0f 0xdc. */
7086FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
7087/** Opcode 0x0f 0xdd. */
7088FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
7089/** Opcode 0x0f 0xde. */
7090FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq);
7091/** Opcode 0x0f 0xdf. */
7092FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
7093/** Opcode 0x0f 0xe0. */
7094FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
7095/** Opcode 0x0f 0xe1. */
7096FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
7097/** Opcode 0x0f 0xe2. */
7098FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
7099/** Opcode 0x0f 0xe3. */
7100FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
7101/** Opcode 0x0f 0xe4. */
7102FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
7103/** Opcode 0x0f 0xe5. */
7104FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
7105/** Opcode 0x0f 0xe6. */
7106FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
7107
7108
7109/** Opcode 0x0f 0xe7. */
7110FNIEMOP_DEF(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq)
7111{
7112 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7113 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7114 {
7115 /*
7116 * Register, memory.
7117 */
7118/** @todo check when the REPNZ/Z bits kick in. Same as lock, probably... */
7119 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7120 {
7121
7122 case IEM_OP_PRF_SIZE_OP: /* SSE */
7123 IEMOP_MNEMONIC(movntdq_Mdq_Vdq, "movntdq Mdq,Vdq");
7124 IEM_MC_BEGIN(0, 2);
7125 IEM_MC_LOCAL(uint128_t, uSrc);
7126 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7127
7128 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7130 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7131 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7132
7133 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7134 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7135
7136 IEM_MC_ADVANCE_RIP();
7137 IEM_MC_END();
7138 break;
7139
7140 case 0: /* MMX */
7141 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7142 IEM_MC_BEGIN(0, 2);
7143 IEM_MC_LOCAL(uint64_t, uSrc);
7144 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7145
7146 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7148 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7149 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7150
7151 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7152 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7153
7154 IEM_MC_ADVANCE_RIP();
7155 IEM_MC_END();
7156 break;
7157
7158 default:
7159 return IEMOP_RAISE_INVALID_OPCODE();
7160 }
7161 }
7162 /* The register, register encoding is invalid. */
7163 else
7164 return IEMOP_RAISE_INVALID_OPCODE();
7165 return VINF_SUCCESS;
7166}
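
/* Note! Both forms are non-temporal stores that minimize cache pollution
   (typically via the write-combining buffers). movntdq (0x66 prefix)
   requires a 16-byte aligned operand, hence the _ALIGN_SSE store above,
   whereas the MMX movntq has no alignment restriction. */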
7167
7168
7169/** Opcode 0x0f 0xe8. */
7170FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
7171/** Opcode 0x0f 0xe9. */
7172FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
7173/** Opcode 0x0f 0xea. */
7174FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
7175/** Opcode 0x0f 0xeb. */
7176FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
7177/** Opcode 0x0f 0xec. */
7178FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
7179/** Opcode 0x0f 0xed. */
7180FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
7181/** Opcode 0x0f 0xee. */
7182FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
7183
7184
7185/** Opcode 0x0f 0xef. */
7186FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
7187{
7188 IEMOP_MNEMONIC(pxor, "pxor");
7189 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
7190}
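
/* Example: "pxor xmm0, xmm0" is the usual register-zeroing idiom; the
   common MMX/SSE2 worker referenced here presumably picks the Pq,Qq or
   Vdq,Wdq form based on the operand-size prefix. */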
7191
7192
7193/** Opcode 0x0f 0xf0. */
7194FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
7195/** Opcode 0x0f 0xf1. */
7196FNIEMOP_STUB(iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq);
7197/** Opcode 0x0f 0xf2. */
7198FNIEMOP_STUB(iemOp_psld_Pq_Qq__pslld_Vdq_Wdq);
7199/** Opcode 0x0f 0xf3. */
7200FNIEMOP_STUB(iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq);
7201/** Opcode 0x0f 0xf4. */
7202FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
7203/** Opcode 0x0f 0xf5. */
7204FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
7205/** Opcode 0x0f 0xf6. */
7206FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
7207/** Opcode 0x0f 0xf7. */
7208FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
7209/** Opcode 0x0f 0xf8. */
7210FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq); //NEXT
7211/** Opcode 0x0f 0xf9. */
7212FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
7213/** Opcode 0x0f 0xfa. */
7214FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
7215/** Opcode 0x0f 0xfb. */
7216FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq);
7217/** Opcode 0x0f 0xfc. */
7218FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
7219/** Opcode 0x0f 0xfd. */
7220FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
7221/** Opcode 0x0f 0xfe. */
7222FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
7223
7224
7225IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[256] =
7226{
7227 /* 0x00 */ iemOp_Grp6,
7228 /* 0x01 */ iemOp_Grp7,
7229 /* 0x02 */ iemOp_lar_Gv_Ew,
7230 /* 0x03 */ iemOp_lsl_Gv_Ew,
7231 /* 0x04 */ iemOp_Invalid,
7232 /* 0x05 */ iemOp_syscall,
7233 /* 0x06 */ iemOp_clts,
7234 /* 0x07 */ iemOp_sysret,
7235 /* 0x08 */ iemOp_invd,
7236 /* 0x09 */ iemOp_wbinvd,
7237 /* 0x0a */ iemOp_Invalid,
7238 /* 0x0b */ iemOp_ud2,
7239 /* 0x0c */ iemOp_Invalid,
7240 /* 0x0d */ iemOp_nop_Ev_GrpP,
7241 /* 0x0e */ iemOp_femms,
7242 /* 0x0f */ iemOp_3Dnow,
7243 /* 0x10 */ iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
7244 /* 0x11 */ iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
7245 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
7246 /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
7247 /* 0x14 */ iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
7248 /* 0x15 */ iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
7249 /* 0x16 */ iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
7250 /* 0x17 */ iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
7251 /* 0x18 */ iemOp_prefetch_Grp16,
7252 /* 0x19 */ iemOp_nop_Ev,
7253 /* 0x1a */ iemOp_nop_Ev,
7254 /* 0x1b */ iemOp_nop_Ev,
7255 /* 0x1c */ iemOp_nop_Ev,
7256 /* 0x1d */ iemOp_nop_Ev,
7257 /* 0x1e */ iemOp_nop_Ev,
7258 /* 0x1f */ iemOp_nop_Ev,
7259 /* 0x20 */ iemOp_mov_Rd_Cd,
7260 /* 0x21 */ iemOp_mov_Rd_Dd,
7261 /* 0x22 */ iemOp_mov_Cd_Rd,
7262 /* 0x23 */ iemOp_mov_Dd_Rd,
7263 /* 0x24 */ iemOp_mov_Rd_Td,
7264 /* 0x25 */ iemOp_Invalid,
7265 /* 0x26 */ iemOp_mov_Td_Rd,
7266 /* 0x27 */ iemOp_Invalid,
7267 /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
7268 /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
7269 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
7270 /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
7271 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
7272 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
7273 /* 0x2e */ iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
7274 /* 0x2f */ iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
7275 /* 0x30 */ iemOp_wrmsr,
7276 /* 0x31 */ iemOp_rdtsc,
7277 /* 0x32 */ iemOp_rdmsr,
7278 /* 0x33 */ iemOp_rdpmc,
7279 /* 0x34 */ iemOp_sysenter,
7280 /* 0x35 */ iemOp_sysexit,
7281 /* 0x36 */ iemOp_Invalid,
7282 /* 0x37 */ iemOp_getsec,
7283 /* 0x38 */ iemOp_3byte_Esc_A4,
7284 /* 0x39 */ iemOp_Invalid,
7285 /* 0x3a */ iemOp_3byte_Esc_A5,
7286 /* 0x3b */ iemOp_Invalid,
7287 /* 0x3c */ iemOp_Invalid,
7288 /* 0x3d */ iemOp_Invalid,
7289 /* 0x3e */ iemOp_Invalid,
7290 /* 0x3f */ iemOp_Invalid,
7291 /* 0x40 */ iemOp_cmovo_Gv_Ev,
7292 /* 0x41 */ iemOp_cmovno_Gv_Ev,
7293 /* 0x42 */ iemOp_cmovc_Gv_Ev,
7294 /* 0x43 */ iemOp_cmovnc_Gv_Ev,
7295 /* 0x44 */ iemOp_cmove_Gv_Ev,
7296 /* 0x45 */ iemOp_cmovne_Gv_Ev,
7297 /* 0x46 */ iemOp_cmovbe_Gv_Ev,
7298 /* 0x47 */ iemOp_cmovnbe_Gv_Ev,
7299 /* 0x48 */ iemOp_cmovs_Gv_Ev,
7300 /* 0x49 */ iemOp_cmovns_Gv_Ev,
7301 /* 0x4a */ iemOp_cmovp_Gv_Ev,
7302 /* 0x4b */ iemOp_cmovnp_Gv_Ev,
7303 /* 0x4c */ iemOp_cmovl_Gv_Ev,
7304 /* 0x4d */ iemOp_cmovnl_Gv_Ev,
7305 /* 0x4e */ iemOp_cmovle_Gv_Ev,
7306 /* 0x4f */ iemOp_cmovnle_Gv_Ev,
7307 /* 0x50 */ iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
7308 /* 0x51 */ iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
7309 /* 0x52 */ iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
7310 /* 0x53 */ iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
7311 /* 0x54 */ iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
7312 /* 0x55 */ iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
7313 /* 0x56 */ iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
7314 /* 0x57 */ iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
7315 /* 0x58 */ iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
7316 /* 0x59 */ iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
7317 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
7318 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
7319 /* 0x5c */ iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
7320 /* 0x5d */ iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
7321 /* 0x5e */ iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
7322 /* 0x5f */ iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
7323 /* 0x60 */ iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
7324 /* 0x61 */ iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
7325 /* 0x62 */ iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
7326 /* 0x63 */ iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
7327 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
7328 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
7329 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
7330 /* 0x67 */ iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
7331 /* 0x68 */ iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
7332 /* 0x69 */ iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
7333 /* 0x6a */ iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
7334 /* 0x6b */ iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
7335 /* 0x6c */ iemOp_punpcklqdq_Vdq_Wdq,
7336 /* 0x6d */ iemOp_punpckhqdq_Vdq_Wdq,
7337 /* 0x6e */ iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
7338 /* 0x6f */ iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
7339 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
7340 /* 0x71 */ iemOp_Grp12,
7341 /* 0x72 */ iemOp_Grp13,
7342 /* 0x73 */ iemOp_Grp14,
7343 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
7344 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
7345 /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
7346 /* 0x77 */ iemOp_emms,
7347 /* 0x78 */ iemOp_vmread_AmdGrp17,
7348 /* 0x79 */ iemOp_vmwrite,
7349 /* 0x7a */ iemOp_Invalid,
7350 /* 0x7b */ iemOp_Invalid,
7351 /* 0x7c */ iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
7352 /* 0x7d */ iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
7353 /* 0x7e */ iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
7354 /* 0x7f */ iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
7355 /* 0x80 */ iemOp_jo_Jv,
7356 /* 0x81 */ iemOp_jno_Jv,
7357 /* 0x82 */ iemOp_jc_Jv,
7358 /* 0x83 */ iemOp_jnc_Jv,
7359 /* 0x84 */ iemOp_je_Jv,
7360 /* 0x85 */ iemOp_jne_Jv,
7361 /* 0x86 */ iemOp_jbe_Jv,
7362 /* 0x87 */ iemOp_jnbe_Jv,
7363 /* 0x88 */ iemOp_js_Jv,
7364 /* 0x89 */ iemOp_jns_Jv,
7365 /* 0x8a */ iemOp_jp_Jv,
7366 /* 0x8b */ iemOp_jnp_Jv,
7367 /* 0x8c */ iemOp_jl_Jv,
7368 /* 0x8d */ iemOp_jnl_Jv,
7369 /* 0x8e */ iemOp_jle_Jv,
7370 /* 0x8f */ iemOp_jnle_Jv,
7371 /* 0x90 */ iemOp_seto_Eb,
7372 /* 0x91 */ iemOp_setno_Eb,
7373 /* 0x92 */ iemOp_setc_Eb,
7374 /* 0x93 */ iemOp_setnc_Eb,
7375 /* 0x94 */ iemOp_sete_Eb,
7376 /* 0x95 */ iemOp_setne_Eb,
7377 /* 0x96 */ iemOp_setbe_Eb,
7378 /* 0x97 */ iemOp_setnbe_Eb,
7379 /* 0x98 */ iemOp_sets_Eb,
7380 /* 0x99 */ iemOp_setns_Eb,
7381 /* 0x9a */ iemOp_setp_Eb,
7382 /* 0x9b */ iemOp_setnp_Eb,
7383 /* 0x9c */ iemOp_setl_Eb,
7384 /* 0x9d */ iemOp_setnl_Eb,
7385 /* 0x9e */ iemOp_setle_Eb,
7386 /* 0x9f */ iemOp_setnle_Eb,
7387 /* 0xa0 */ iemOp_push_fs,
7388 /* 0xa1 */ iemOp_pop_fs,
7389 /* 0xa2 */ iemOp_cpuid,
7390 /* 0xa3 */ iemOp_bt_Ev_Gv,
7391 /* 0xa4 */ iemOp_shld_Ev_Gv_Ib,
7392 /* 0xa5 */ iemOp_shld_Ev_Gv_CL,
7393 /* 0xa6 */ iemOp_Invalid,
7394 /* 0xa7 */ iemOp_Invalid,
7395 /* 0xa8 */ iemOp_push_gs,
7396 /* 0xa9 */ iemOp_pop_gs,
7397 /* 0xaa */ iemOp_rsm,
7398 /* 0xab */ iemOp_bts_Ev_Gv,
7399 /* 0xac */ iemOp_shrd_Ev_Gv_Ib,
7400 /* 0xad */ iemOp_shrd_Ev_Gv_CL,
7401 /* 0xae */ iemOp_Grp15,
7402 /* 0xaf */ iemOp_imul_Gv_Ev,
7403 /* 0xb0 */ iemOp_cmpxchg_Eb_Gb,
7404 /* 0xb1 */ iemOp_cmpxchg_Ev_Gv,
7405 /* 0xb2 */ iemOp_lss_Gv_Mp,
7406 /* 0xb3 */ iemOp_btr_Ev_Gv,
7407 /* 0xb4 */ iemOp_lfs_Gv_Mp,
7408 /* 0xb5 */ iemOp_lgs_Gv_Mp,
7409 /* 0xb6 */ iemOp_movzx_Gv_Eb,
7410 /* 0xb7 */ iemOp_movzx_Gv_Ew,
7411 /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,
7412 /* 0xb9 */ iemOp_Grp10,
7413 /* 0xba */ iemOp_Grp8,
7414 /* 0xbb */ iemOp_btc_Ev_Gv,
7415 /* 0xbc */ iemOp_bsf_Gv_Ev,
7416 /* 0xbd */ iemOp_bsr_Gv_Ev,
7417 /* 0xbe */ iemOp_movsx_Gv_Eb,
7418 /* 0xbf */ iemOp_movsx_Gv_Ew,
7419 /* 0xc0 */ iemOp_xadd_Eb_Gb,
7420 /* 0xc1 */ iemOp_xadd_Ev_Gv,
7421 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
7422 /* 0xc3 */ iemOp_movnti_My_Gy,
7423 /* 0xc4 */ iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
7424 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
7425 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
7426 /* 0xc7 */ iemOp_Grp9,
7427 /* 0xc8 */ iemOp_bswap_rAX_r8,
7428 /* 0xc9 */ iemOp_bswap_rCX_r9,
7429 /* 0xca */ iemOp_bswap_rDX_r10,
7430 /* 0xcb */ iemOp_bswap_rBX_r11,
7431 /* 0xcc */ iemOp_bswap_rSP_r12,
7432 /* 0xcd */ iemOp_bswap_rBP_r13,
7433 /* 0xce */ iemOp_bswap_rSI_r14,
7434 /* 0xcf */ iemOp_bswap_rDI_r15,
7435 /* 0xd0 */ iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
7436 /* 0xd1 */ iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq,
7437 /* 0xd2 */ iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
7438 /* 0xd3 */ iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
7439 /* 0xd4 */ iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
7440 /* 0xd5 */ iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq,
7441 /* 0xd6 */ iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
7442 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
7443 /* 0xd8 */ iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
7444 /* 0xd9 */ iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
7445 /* 0xda */ iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
7446 /* 0xdb */ iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
7447 /* 0xdc */ iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
7448 /* 0xdd */ iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
7449 /* 0xde */ iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq,
7450 /* 0xdf */ iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
7451 /* 0xe0 */ iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
7452 /* 0xe1 */ iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
7453 /* 0xe2 */ iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
7454 /* 0xe3 */ iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
7455 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
7456 /* 0xe5 */ iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
7457 /* 0xe6 */ iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
7458 /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
7459 /* 0xe8 */ iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
7460 /* 0xe9 */ iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
7461 /* 0xea */ iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
7462 /* 0xeb */ iemOp_por_Pq_Qq__por_Vdq_Wdq,
7463 /* 0xec */ iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
7464 /* 0xed */ iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
7465 /* 0xee */ iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
7466 /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
7467 /* 0xf0 */ iemOp_lddqu_Vdq_Mdq,
7468 /* 0xf1 */ iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq,
7469 /* 0xf2 */ iemOp_psld_Pq_Qq__pslld_Vdq_Wdq,
7470 /* 0xf3 */ iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq,
7471 /* 0xf4 */ iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
7472 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
7473 /* 0xf6 */ iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
7474 /* 0xf7 */ iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
7475 /* 0xf8 */ iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
7476 /* 0xf9 */ iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
7477 /* 0xfa */ iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
7478 /* 0xfb */ iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq,
7479 /* 0xfc */ iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
7480 /* 0xfd */ iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
7481 /* 0xfe */ iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
7482 /* 0xff */ iemOp_Invalid
7483};
7484
7485/** @} */
7486
7487
7488/** @name One byte opcodes.
7489 *
7490 * @{
7491 */
7492
7493/** Opcode 0x00. */
7494FNIEMOP_DEF(iemOp_add_Eb_Gb)
7495{
7496 IEMOP_MNEMONIC(add_Eb_Gb, "add Eb,Gb");
7497 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
7498}
7499
7500
7501/** Opcode 0x01. */
7502FNIEMOP_DEF(iemOp_add_Ev_Gv)
7503{
7504 IEMOP_MNEMONIC(add_Ev_Gv, "add Ev,Gv");
7505 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
7506}
7507
7508
7509/** Opcode 0x02. */
7510FNIEMOP_DEF(iemOp_add_Gb_Eb)
7511{
7512 IEMOP_MNEMONIC(add_Gb_Eb, "add Gb,Eb");
7513 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
7514}
7515
7516
7517/** Opcode 0x03. */
7518FNIEMOP_DEF(iemOp_add_Gv_Ev)
7519{
7520 IEMOP_MNEMONIC(add_Gv_Ev, "add Gv,Ev");
7521 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
7522}
7523
7524
7525/** Opcode 0x04. */
7526FNIEMOP_DEF(iemOp_add_Al_Ib)
7527{
7528 IEMOP_MNEMONIC(add_al_Ib, "add al,Ib");
7529 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
7530}
7531
7532
7533/** Opcode 0x05. */
7534FNIEMOP_DEF(iemOp_add_eAX_Iz)
7535{
7536 IEMOP_MNEMONIC(add_rAX_Iz, "add rAX,Iz");
7537 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
7538}
7539
7540
7541/** Opcode 0x06. */
7542FNIEMOP_DEF(iemOp_push_ES)
7543{
7544 IEMOP_MNEMONIC(push_es, "push es");
7545 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
7546}
7547
7548
7549/** Opcode 0x07. */
7550FNIEMOP_DEF(iemOp_pop_ES)
7551{
7552 IEMOP_MNEMONIC(pop_es, "pop es");
7553 IEMOP_HLP_NO_64BIT();
7554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7555 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
7556}
7557
7558
7559/** Opcode 0x08. */
7560FNIEMOP_DEF(iemOp_or_Eb_Gb)
7561{
7562 IEMOP_MNEMONIC(or_Eb_Gb, "or Eb,Gb");
7563 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7564 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
7565}
7566
7567
7568/** Opcode 0x09. */
7569FNIEMOP_DEF(iemOp_or_Ev_Gv)
7570{
7571 IEMOP_MNEMONIC(or_Ev_Gv, "or Ev,Gv");
7572 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7573 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
7574}
7575
7576
7577/** Opcode 0x0a. */
7578FNIEMOP_DEF(iemOp_or_Gb_Eb)
7579{
7580 IEMOP_MNEMONIC(or_Gb_Eb, "or Gb,Eb");
7581 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7582 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
7583}
7584
7585
7586/** Opcode 0x0b. */
7587FNIEMOP_DEF(iemOp_or_Gv_Ev)
7588{
7589 IEMOP_MNEMONIC(or_Gv_Ev, "or Gv,Ev");
7590 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7591 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
7592}
7593
7594
7595/** Opcode 0x0c. */
7596FNIEMOP_DEF(iemOp_or_Al_Ib)
7597{
7598 IEMOP_MNEMONIC(or_al_Ib, "or al,Ib");
7599 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7600 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
7601}
7602
7603
7604/** Opcode 0x0d. */
7605FNIEMOP_DEF(iemOp_or_eAX_Iz)
7606{
7607 IEMOP_MNEMONIC(or_rAX_Iz, "or rAX,Iz");
7608 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7609 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
7610}
7611
7612
7613/** Opcode 0x0e. */
7614FNIEMOP_DEF(iemOp_push_CS)
7615{
7616 IEMOP_MNEMONIC(push_cs, "push cs");
7617 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
7618}
7619
7620
7621/** Opcode 0x0f. */
7622FNIEMOP_DEF(iemOp_2byteEscape)
7623{
7624 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7625 /** @todo PUSH CS on 8086, undefined on 80186. */
7626 IEMOP_HLP_MIN_286();
7627 return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
7628}
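
/* Example: for "0f a2" the byte 0xa2 is fetched here and dispatched
   through g_apfnTwoByteMap to iemOp_cpuid. */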
7629
7630/** Opcode 0x10. */
7631FNIEMOP_DEF(iemOp_adc_Eb_Gb)
7632{
7633 IEMOP_MNEMONIC(adc_Eb_Gb, "adc Eb,Gb");
7634 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
7635}
7636
7637
7638/** Opcode 0x11. */
7639FNIEMOP_DEF(iemOp_adc_Ev_Gv)
7640{
7641 IEMOP_MNEMONIC(adc_Ev_Gv, "adc Ev,Gv");
7642 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
7643}
7644
7645
7646/** Opcode 0x12. */
7647FNIEMOP_DEF(iemOp_adc_Gb_Eb)
7648{
7649 IEMOP_MNEMONIC(adc_Gb_Eb, "adc Gb,Eb");
7650 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
7651}
7652
7653
7654/** Opcode 0x13. */
7655FNIEMOP_DEF(iemOp_adc_Gv_Ev)
7656{
7657 IEMOP_MNEMONIC(adc_Gv_Ev, "adc Gv,Ev");
7658 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
7659}
7660
7661
7662/** Opcode 0x14. */
7663FNIEMOP_DEF(iemOp_adc_Al_Ib)
7664{
7665 IEMOP_MNEMONIC(adc_al_Ib, "adc al,Ib");
7666 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
7667}
7668
7669
7670/** Opcode 0x15. */
7671FNIEMOP_DEF(iemOp_adc_eAX_Iz)
7672{
7673 IEMOP_MNEMONIC(adc_rAX_Iz, "adc rAX,Iz");
7674 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
7675}
7676
7677
7678/** Opcode 0x16. */
7679FNIEMOP_DEF(iemOp_push_SS)
7680{
7681 IEMOP_MNEMONIC(push_ss, "push ss");
7682 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
7683}
7684
7685
7686/** Opcode 0x17. */
7687FNIEMOP_DEF(iemOp_pop_SS)
7688{
7689 IEMOP_MNEMONIC(pop_ss, "pop ss"); /** @todo implies instruction fusing? */
7690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7691 IEMOP_HLP_NO_64BIT();
7692 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
7693}
7694
7695
7696/** Opcode 0x18. */
7697FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
7698{
7699 IEMOP_MNEMONIC(sbb_Eb_Gb, "sbb Eb,Gb");
7700 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
7701}
7702
7703
7704/** Opcode 0x19. */
7705FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
7706{
7707 IEMOP_MNEMONIC(sbb_Ev_Gv, "sbb Ev,Gv");
7708 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
7709}
7710
7711
7712/** Opcode 0x1a. */
7713FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
7714{
7715 IEMOP_MNEMONIC(sbb_Gb_Eb, "sbb Gb,Eb");
7716 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
7717}
7718
7719
7720/** Opcode 0x1b. */
7721FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
7722{
7723 IEMOP_MNEMONIC(sbb_Gv_Ev, "sbb Gv,Ev");
7724 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
7725}
7726
7727
7728/** Opcode 0x1c. */
7729FNIEMOP_DEF(iemOp_sbb_Al_Ib)
7730{
7731 IEMOP_MNEMONIC(sbb_al_Ib, "sbb al,Ib");
7732 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
7733}
7734
7735
7736/** Opcode 0x1d. */
7737FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
7738{
7739 IEMOP_MNEMONIC(sbb_rAX_Iz, "sbb rAX,Iz");
7740 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
7741}
7742
7743
7744/** Opcode 0x1e. */
7745FNIEMOP_DEF(iemOp_push_DS)
7746{
7747 IEMOP_MNEMONIC(push_ds, "push ds");
7748 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
7749}
7750
7751
7752/** Opcode 0x1f. */
7753FNIEMOP_DEF(iemOp_pop_DS)
7754{
7755 IEMOP_MNEMONIC(pop_ds, "pop ds");
7756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7757 IEMOP_HLP_NO_64BIT();
7758 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
7759}
7760
7761
7762/** Opcode 0x20. */
7763FNIEMOP_DEF(iemOp_and_Eb_Gb)
7764{
7765 IEMOP_MNEMONIC(and_Eb_Gb, "and Eb,Gb");
7766 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7767 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
7768}
7769
7770
7771/** Opcode 0x21. */
7772FNIEMOP_DEF(iemOp_and_Ev_Gv)
7773{
7774 IEMOP_MNEMONIC(and_Ev_Gv, "and Ev,Gv");
7775 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7776 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
7777}
7778
7779
7780/** Opcode 0x22. */
7781FNIEMOP_DEF(iemOp_and_Gb_Eb)
7782{
7783 IEMOP_MNEMONIC(and_Gb_Eb, "and Gb,Eb");
7784 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7785 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
7786}
7787
7788
7789/** Opcode 0x23. */
7790FNIEMOP_DEF(iemOp_and_Gv_Ev)
7791{
7792 IEMOP_MNEMONIC(and_Gv_Ev, "and Gv,Ev");
7793 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7794 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
7795}
7796
7797
7798/** Opcode 0x24. */
7799FNIEMOP_DEF(iemOp_and_Al_Ib)
7800{
7801 IEMOP_MNEMONIC(and_al_Ib, "and al,Ib");
7802 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7803 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
7804}
7805
7806
7807/** Opcode 0x25. */
7808FNIEMOP_DEF(iemOp_and_eAX_Iz)
7809{
7810 IEMOP_MNEMONIC(and_rAX_Iz, "and rAX,Iz");
7811 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7812 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
7813}
7814
7815
7816/** Opcode 0x26. */
7817FNIEMOP_DEF(iemOp_seg_ES)
7818{
7819 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
7820 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
7821 pVCpu->iem.s.iEffSeg = X86_SREG_ES;
7822
7823 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7824 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7825}
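
/* Example: "26 8b 00" is mov eax, es:[eax]; the prefix byte only records
   the override and re-dispatches the remainder of the instruction through
   g_apfnOneByteMap. */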
7826
7827
7828/** Opcode 0x27. */
7829FNIEMOP_DEF(iemOp_daa)
7830{
7831 IEMOP_MNEMONIC(daa_AL, "daa AL");
7832 IEMOP_HLP_NO_64BIT();
7833 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7834 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
7835 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
7836}
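
/* Worked example: after "add al, 0x35" with AL=0x79 (packed BCD 79+35),
   AL=0xae; daa adds 6 for the low digit (0xb4, AF=1) and 0x60 for the
   high digit, leaving AL=0x14 with CF=1, i.e. the BCD result 114. */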
7837
7838
7839/** Opcode 0x28. */
7840FNIEMOP_DEF(iemOp_sub_Eb_Gb)
7841{
7842 IEMOP_MNEMONIC(sub_Eb_Gb, "sub Eb,Gb");
7843 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
7844}
7845
7846
7847/** Opcode 0x29. */
7848FNIEMOP_DEF(iemOp_sub_Ev_Gv)
7849{
7850 IEMOP_MNEMONIC(sub_Ev_Gv, "sub Ev,Gv");
7851 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
7852}
7853
7854
7855/** Opcode 0x2a. */
7856FNIEMOP_DEF(iemOp_sub_Gb_Eb)
7857{
7858 IEMOP_MNEMONIC(sub_Gb_Eb, "sub Gb,Eb");
7859 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
7860}
7861
7862
7863/** Opcode 0x2b. */
7864FNIEMOP_DEF(iemOp_sub_Gv_Ev)
7865{
7866 IEMOP_MNEMONIC(sub_Gv_Ev, "sub Gv,Ev");
7867 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
7868}
7869
7870
7871/** Opcode 0x2c. */
7872FNIEMOP_DEF(iemOp_sub_Al_Ib)
7873{
7874 IEMOP_MNEMONIC(sub_al_Ib, "sub al,Ib");
7875 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
7876}
7877
7878
7879/** Opcode 0x2d. */
7880FNIEMOP_DEF(iemOp_sub_eAX_Iz)
7881{
7882 IEMOP_MNEMONIC(sub_rAX_Iz, "sub rAX,Iz");
7883 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
7884}
7885
7886
7887/** Opcode 0x2e. */
7888FNIEMOP_DEF(iemOp_seg_CS)
7889{
7890 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
7891 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
7892 pVCpu->iem.s.iEffSeg = X86_SREG_CS;
7893
7894 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7895 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7896}
7897
7898
7899/** Opcode 0x2f. */
7900FNIEMOP_DEF(iemOp_das)
7901{
7902 IEMOP_MNEMONIC(das_AL, "das AL");
7903 IEMOP_HLP_NO_64BIT();
7904 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7905 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
7906 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
7907}
7908
7909
7910/** Opcode 0x30. */
7911FNIEMOP_DEF(iemOp_xor_Eb_Gb)
7912{
7913 IEMOP_MNEMONIC(xor_Eb_Gb, "xor Eb,Gb");
7914 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7915 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
7916}
7917
7918
7919/** Opcode 0x31. */
7920FNIEMOP_DEF(iemOp_xor_Ev_Gv)
7921{
7922 IEMOP_MNEMONIC(xor_Ev_Gv, "xor Ev,Gv");
7923 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7924 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
7925}
7926
7927
7928/** Opcode 0x32. */
7929FNIEMOP_DEF(iemOp_xor_Gb_Eb)
7930{
7931 IEMOP_MNEMONIC(xor_Gb_Eb, "xor Gb,Eb");
7932 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7933 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
7934}
7935
7936
7937/** Opcode 0x33. */
7938FNIEMOP_DEF(iemOp_xor_Gv_Ev)
7939{
7940 IEMOP_MNEMONIC(xor_Gv_Ev, "xor Gv,Ev");
7941 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7942 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
7943}
7944
7945
7946/** Opcode 0x34. */
7947FNIEMOP_DEF(iemOp_xor_Al_Ib)
7948{
7949 IEMOP_MNEMONIC(xor_al_Ib, "xor al,Ib");
7950 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7951 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
7952}
7953
7954
7955/** Opcode 0x35. */
7956FNIEMOP_DEF(iemOp_xor_eAX_Iz)
7957{
7958 IEMOP_MNEMONIC(xor_rAX_Iz, "xor rAX,Iz");
7959 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7960 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
7961}
7962
7963
7964/** Opcode 0x36. */
7965FNIEMOP_DEF(iemOp_seg_SS)
7966{
7967 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
7968 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
7969 pVCpu->iem.s.iEffSeg = X86_SREG_SS;
7970
7971 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7972 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7973}
7974
7975
7976/** Opcode 0x37. */
7977FNIEMOP_STUB(iemOp_aaa);
7978
7979
7980/** Opcode 0x38. */
7981FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
7982{
7983 IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
7984 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
7985}
7986
7987
7988/** Opcode 0x39. */
7989FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
7990{
7991 IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
7992 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
7993}
7994
7995
7996/** Opcode 0x3a. */
7997FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
7998{
7999 IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
8000 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
8001}
8002
8003
8004/** Opcode 0x3b. */
8005FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
8006{
8007 IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
8008 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
8009}
8010
8011
8012/** Opcode 0x3c. */
8013FNIEMOP_DEF(iemOp_cmp_Al_Ib)
8014{
8015 IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
8016 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
8017}
8018
8019
8020/** Opcode 0x3d. */
8021FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
8022{
8023 IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
8024 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
8025}
8026
8027
8028/** Opcode 0x3e. */
8029FNIEMOP_DEF(iemOp_seg_DS)
8030{
8031 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
8032 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
8033 pVCpu->iem.s.iEffSeg = X86_SREG_DS;
8034
8035 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8036 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8037}
8038
8039
8040/** Opcode 0x3f. */
8041FNIEMOP_STUB(iemOp_aas);
8042
8043/**
8044 * Common 'inc/dec/not/neg register' helper.
8045 */
8046FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
8047{
8048 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8049 switch (pVCpu->iem.s.enmEffOpSize)
8050 {
8051 case IEMMODE_16BIT:
8052 IEM_MC_BEGIN(2, 0);
8053 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8054 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8055 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
8056 IEM_MC_REF_EFLAGS(pEFlags);
8057 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
8058 IEM_MC_ADVANCE_RIP();
8059 IEM_MC_END();
8060 return VINF_SUCCESS;
8061
8062 case IEMMODE_32BIT:
8063 IEM_MC_BEGIN(2, 0);
8064 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8065 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8066 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8067 IEM_MC_REF_EFLAGS(pEFlags);
8068 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
8069 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8070 IEM_MC_ADVANCE_RIP();
8071 IEM_MC_END();
8072 return VINF_SUCCESS;
8073
8074 case IEMMODE_64BIT:
8075 IEM_MC_BEGIN(2, 0);
8076 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8077 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8078 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8079 IEM_MC_REF_EFLAGS(pEFlags);
8080 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
8081 IEM_MC_ADVANCE_RIP();
8082 IEM_MC_END();
8083 return VINF_SUCCESS;
8084 }
8085 return VINF_SUCCESS;
8086}
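
/* Flag note: of the operations sharing this helper, inc/dec update the
   arithmetic flags but leave CF untouched, not modifies no flags at all,
   and neg sets CF unless the operand was zero; the EFLAGS reference lets
   each worker apply exactly its own subset. */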
8087
8088
8089/** Opcode 0x40. */
8090FNIEMOP_DEF(iemOp_inc_eAX)
8091{
8092 /*
8093 * This is a REX prefix in 64-bit mode.
8094 */
8095 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8096 {
8097 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
8098 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;
8099
8100 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8101 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8102 }
8103
8104 IEMOP_MNEMONIC(inc_eAX, "inc eAX");
8105 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
8106}
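
/* Example: in 64-bit mode "40 ff c0" decodes as a REX prefix followed by
   "inc eax" (FF /0), whereas outside 64-bit mode the single byte 0x40 is
   the inc itself; the prefix path above just records the REX bits and
   re-dispatches the next opcode byte. */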
8107
8108
8109/** Opcode 0x41. */
8110FNIEMOP_DEF(iemOp_inc_eCX)
8111{
8112 /*
8113 * This is a REX prefix in 64-bit mode.
8114 */
8115 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8116 {
8117 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
8118 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
8119 pVCpu->iem.s.uRexB = 1 << 3;
8120
8121 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8122 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8123 }
8124
8125 IEMOP_MNEMONIC(inc_eCX, "inc eCX");
8126 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
8127}
8128
8129
8130/** Opcode 0x42. */
8131FNIEMOP_DEF(iemOp_inc_eDX)
8132{
8133 /*
8134 * This is a REX prefix in 64-bit mode.
8135 */
8136 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8137 {
8138 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
8139 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
8140 pVCpu->iem.s.uRexIndex = 1 << 3;
8141
8142 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8143 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8144 }
8145
8146 IEMOP_MNEMONIC(inc_eDX, "inc eDX");
8147 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
8148}
8149
8150
8151
8152/** Opcode 0x43. */
8153FNIEMOP_DEF(iemOp_inc_eBX)
8154{
8155 /*
8156 * This is a REX prefix in 64-bit mode.
8157 */
8158 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8159 {
8160 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
8161 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
8162 pVCpu->iem.s.uRexB = 1 << 3;
8163 pVCpu->iem.s.uRexIndex = 1 << 3;
8164
8165 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8166 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8167 }
8168
8169 IEMOP_MNEMONIC(inc_eBX, "inc eBX");
8170 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
8171}
8172
8173
8174/** Opcode 0x44. */
8175FNIEMOP_DEF(iemOp_inc_eSP)
8176{
8177 /*
8178 * This is a REX prefix in 64-bit mode.
8179 */
8180 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8181 {
8182 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
8183 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
8184 pVCpu->iem.s.uRexReg = 1 << 3;
8185
8186 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8187 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8188 }
8189
8190 IEMOP_MNEMONIC(inc_eSP, "inc eSP");
8191 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
8192}
8193
8194
8195/** Opcode 0x45. */
8196FNIEMOP_DEF(iemOp_inc_eBP)
8197{
8198 /*
8199 * This is a REX prefix in 64-bit mode.
8200 */
8201 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8202 {
8203 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
8204 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
8205 pVCpu->iem.s.uRexReg = 1 << 3;
8206 pVCpu->iem.s.uRexB = 1 << 3;
8207
8208 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8209 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8210 }
8211
8212 IEMOP_MNEMONIC(inc_eBP, "inc eBP");
8213 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
8214}
8215
8216
8217/** Opcode 0x46. */
8218FNIEMOP_DEF(iemOp_inc_eSI)
8219{
8220 /*
8221 * This is a REX prefix in 64-bit mode.
8222 */
8223 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8224 {
8225 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
8226 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
8227 pVCpu->iem.s.uRexReg = 1 << 3;
8228 pVCpu->iem.s.uRexIndex = 1 << 3;
8229
8230 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8231 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8232 }
8233
8234 IEMOP_MNEMONIC(inc_eSI, "inc eSI");
8235 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
8236}
8237
8238
8239/** Opcode 0x47. */
8240FNIEMOP_DEF(iemOp_inc_eDI)
8241{
8242 /*
8243 * This is a REX prefix in 64-bit mode.
8244 */
8245 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8246 {
8247 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
8248 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
8249 pVCpu->iem.s.uRexReg = 1 << 3;
8250 pVCpu->iem.s.uRexB = 1 << 3;
8251 pVCpu->iem.s.uRexIndex = 1 << 3;
8252
8253 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8254 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8255 }
8256
8257 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
8258 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
8259}
8260
8261
8262/** Opcode 0x48. */
8263FNIEMOP_DEF(iemOp_dec_eAX)
8264{
8265 /*
8266 * This is a REX prefix in 64-bit mode.
8267 */
8268 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8269 {
8270 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
8271 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
8272 iemRecalEffOpSize(pVCpu);
8273
8274 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8275 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8276 }
8277
8278 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
8279 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
8280}
8281
8282
8283/** Opcode 0x49. */
8284FNIEMOP_DEF(iemOp_dec_eCX)
8285{
8286 /*
8287 * This is a REX prefix in 64-bit mode.
8288 */
8289 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8290 {
8291 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
8292 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
8293 pVCpu->iem.s.uRexB = 1 << 3;
8294 iemRecalEffOpSize(pVCpu);
8295
8296 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8297 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8298 }
8299
8300 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
8301 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
8302}
8303
8304
8305/** Opcode 0x4a. */
8306FNIEMOP_DEF(iemOp_dec_eDX)
8307{
8308 /*
8309 * This is a REX prefix in 64-bit mode.
8310 */
8311 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8312 {
8313 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
8314 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8315 pVCpu->iem.s.uRexIndex = 1 << 3;
8316 iemRecalEffOpSize(pVCpu);
8317
8318 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8319 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8320 }
8321
8322 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
8323 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
8324}
8325
8326
8327/** Opcode 0x4b. */
8328FNIEMOP_DEF(iemOp_dec_eBX)
8329{
8330 /*
8331 * This is a REX prefix in 64-bit mode.
8332 */
8333 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8334 {
8335 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
8336 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8337 pVCpu->iem.s.uRexB = 1 << 3;
8338 pVCpu->iem.s.uRexIndex = 1 << 3;
8339 iemRecalEffOpSize(pVCpu);
8340
8341 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8342 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8343 }
8344
8345 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
8346 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
8347}
8348
8349
8350/** Opcode 0x4c. */
8351FNIEMOP_DEF(iemOp_dec_eSP)
8352{
8353 /*
8354 * This is a REX prefix in 64-bit mode.
8355 */
8356 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8357 {
8358 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
8359 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
8360 pVCpu->iem.s.uRexReg = 1 << 3;
8361 iemRecalEffOpSize(pVCpu);
8362
8363 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8364 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8365 }
8366
8367 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
8368 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
8369}
8370
8371
8372/** Opcode 0x4d. */
8373FNIEMOP_DEF(iemOp_dec_eBP)
8374{
8375 /*
8376 * This is a REX prefix in 64-bit mode.
8377 */
8378 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8379 {
8380 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
8381 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
8382 pVCpu->iem.s.uRexReg = 1 << 3;
8383 pVCpu->iem.s.uRexB = 1 << 3;
8384 iemRecalEffOpSize(pVCpu);
8385
8386 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8387 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8388 }
8389
8390 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
8391 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
8392}
8393
8394
8395/** Opcode 0x4e. */
8396FNIEMOP_DEF(iemOp_dec_eSI)
8397{
8398 /*
8399 * This is a REX prefix in 64-bit mode.
8400 */
8401 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8402 {
8403 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
8404 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8405 pVCpu->iem.s.uRexReg = 1 << 3;
8406 pVCpu->iem.s.uRexIndex = 1 << 3;
8407 iemRecalEffOpSize(pVCpu);
8408
8409 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8410 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8411 }
8412
8413 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
8414 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
8415}
8416
8417
8418/** Opcode 0x4f. */
8419FNIEMOP_DEF(iemOp_dec_eDI)
8420{
8421 /*
8422 * This is a REX prefix in 64-bit mode.
8423 */
8424 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8425 {
8426 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
8427 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8428 pVCpu->iem.s.uRexReg = 1 << 3;
8429 pVCpu->iem.s.uRexB = 1 << 3;
8430 pVCpu->iem.s.uRexIndex = 1 << 3;
8431 iemRecalEffOpSize(pVCpu);
8432
8433 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8434 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8435 }
8436
8437 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
8438 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
8439}
8440
8441
8442/**
8443 * Common 'push register' helper.
8444 */
8445FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
8446{
8447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8448 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8449 {
8450 iReg |= pVCpu->iem.s.uRexB;
8451 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
8452 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8453 }
8454
8455 switch (pVCpu->iem.s.enmEffOpSize)
8456 {
8457 case IEMMODE_16BIT:
8458 IEM_MC_BEGIN(0, 1);
8459 IEM_MC_LOCAL(uint16_t, u16Value);
8460 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
8461 IEM_MC_PUSH_U16(u16Value);
8462 IEM_MC_ADVANCE_RIP();
8463 IEM_MC_END();
8464 break;
8465
8466 case IEMMODE_32BIT:
8467 IEM_MC_BEGIN(0, 1);
8468 IEM_MC_LOCAL(uint32_t, u32Value);
8469 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
8470 IEM_MC_PUSH_U32(u32Value);
8471 IEM_MC_ADVANCE_RIP();
8472 IEM_MC_END();
8473 break;
8474
8475 case IEMMODE_64BIT:
8476 IEM_MC_BEGIN(0, 1);
8477 IEM_MC_LOCAL(uint64_t, u64Value);
8478 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
8479 IEM_MC_PUSH_U64(u64Value);
8480 IEM_MC_ADVANCE_RIP();
8481 IEM_MC_END();
8482 break;
8483 }
8484
8485 return VINF_SUCCESS;
8486}
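
/* Note! In 64-bit mode push defaults to a 64-bit operand and a 32-bit
   operand cannot be encoded; "66 50" pushes AX and drops RSP by 2, while
   plain "50" pushes RAX, matching the enmEffOpSize selection above. */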
8487
8488
8489/** Opcode 0x50. */
8490FNIEMOP_DEF(iemOp_push_eAX)
8491{
8492 IEMOP_MNEMONIC(push_rAX, "push rAX");
8493 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
8494}
8495
8496
8497/** Opcode 0x51. */
8498FNIEMOP_DEF(iemOp_push_eCX)
8499{
8500 IEMOP_MNEMONIC(push_rCX, "push rCX");
8501 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
8502}
8503
8504
8505/** Opcode 0x52. */
8506FNIEMOP_DEF(iemOp_push_eDX)
8507{
8508 IEMOP_MNEMONIC(push_rDX, "push rDX");
8509 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
8510}
8511
8512
8513/** Opcode 0x53. */
8514FNIEMOP_DEF(iemOp_push_eBX)
8515{
8516 IEMOP_MNEMONIC(push_rBX, "push rBX");
8517 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
8518}
8519
8520
8521/** Opcode 0x54. */
8522FNIEMOP_DEF(iemOp_push_eSP)
8523{
8524 IEMOP_MNEMONIC(push_rSP, "push rSP");
8525 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
8526 {
8527 IEM_MC_BEGIN(0, 1);
8528 IEM_MC_LOCAL(uint16_t, u16Value);
8529 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
8530 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
8531 IEM_MC_PUSH_U16(u16Value);
8532 IEM_MC_ADVANCE_RIP();
8533 IEM_MC_END();
8534 }
8535 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
8536}
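
/* History note: the 8086/8088 decrement SP before storing it, so "push sp"
   writes the new, already decremented value; the 80286 and later push the
   original value. The IEMTARGETCPU_8086 block above reproduces the old
   behaviour by pushing SP-2. */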
8537
8538
8539/** Opcode 0x55. */
8540FNIEMOP_DEF(iemOp_push_eBP)
8541{
8542 IEMOP_MNEMONIC(push_rBP, "push rBP");
8543 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
8544}
8545
8546
8547/** Opcode 0x56. */
8548FNIEMOP_DEF(iemOp_push_eSI)
8549{
8550 IEMOP_MNEMONIC(push_rSI, "push rSI");
8551 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
8552}
8553
8554
8555/** Opcode 0x57. */
8556FNIEMOP_DEF(iemOp_push_eDI)
8557{
8558 IEMOP_MNEMONIC(push_rDI, "push rDI");
8559 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
8560}
8561
8562
8563/**
8564 * Common 'pop register' helper.
8565 */
8566FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
8567{
8568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8569 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8570 {
8571 iReg |= pVCpu->iem.s.uRexB;
8572 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
8573 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8574 }
8575
8576 switch (pVCpu->iem.s.enmEffOpSize)
8577 {
8578 case IEMMODE_16BIT:
8579 IEM_MC_BEGIN(0, 1);
8580 IEM_MC_LOCAL(uint16_t *, pu16Dst);
8581 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
8582 IEM_MC_POP_U16(pu16Dst);
8583 IEM_MC_ADVANCE_RIP();
8584 IEM_MC_END();
8585 break;
8586
8587 case IEMMODE_32BIT:
8588 IEM_MC_BEGIN(0, 1);
8589 IEM_MC_LOCAL(uint32_t *, pu32Dst);
8590 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8591 IEM_MC_POP_U32(pu32Dst);
8592 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
8593 IEM_MC_ADVANCE_RIP();
8594 IEM_MC_END();
8595 break;
8596
8597 case IEMMODE_64BIT:
8598 IEM_MC_BEGIN(0, 1);
8599 IEM_MC_LOCAL(uint64_t *, pu64Dst);
8600 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8601 IEM_MC_POP_U64(pu64Dst);
8602 IEM_MC_ADVANCE_RIP();
8603 IEM_MC_END();
8604 break;
8605 }
8606
8607 return VINF_SUCCESS;
8608}
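
/* Note! In 64-bit mode a 32-bit pop zero-extends into the high dword of
   the destination (the CLEAR_HIGH todo above), whereas a 16-bit pop
   leaves the upper bits of the register untouched. */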
8609
8610
8611/** Opcode 0x58. */
8612FNIEMOP_DEF(iemOp_pop_eAX)
8613{
8614 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
8615 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
8616}
8617
8618
8619/** Opcode 0x59. */
8620FNIEMOP_DEF(iemOp_pop_eCX)
8621{
8622 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
8623 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
8624}
8625
8626
8627/** Opcode 0x5a. */
8628FNIEMOP_DEF(iemOp_pop_eDX)
8629{
8630 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
8631 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
8632}
8633
8634
8635/** Opcode 0x5b. */
8636FNIEMOP_DEF(iemOp_pop_eBX)
8637{
8638 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
8639 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
8640}
8641
8642
8643/** Opcode 0x5c. */
8644FNIEMOP_DEF(iemOp_pop_eSP)
8645{
8646 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
8647 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8648 {
8649 if (pVCpu->iem.s.uRexB)
8650 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
8651 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
8652 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8653 }
8654
8655 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
8656 DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
8657 /** @todo add testcase for this instruction. */
8658 switch (pVCpu->iem.s.enmEffOpSize)
8659 {
8660 case IEMMODE_16BIT:
8661 IEM_MC_BEGIN(0, 1);
8662 IEM_MC_LOCAL(uint16_t, u16Dst);
8663 IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
8664 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
8665 IEM_MC_ADVANCE_RIP();
8666 IEM_MC_END();
8667 break;
8668
8669 case IEMMODE_32BIT:
8670 IEM_MC_BEGIN(0, 1);
8671 IEM_MC_LOCAL(uint32_t, u32Dst);
8672 IEM_MC_POP_U32(&u32Dst);
8673 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
8674 IEM_MC_ADVANCE_RIP();
8675 IEM_MC_END();
8676 break;
8677
8678 case IEMMODE_64BIT:
8679 IEM_MC_BEGIN(0, 1);
8680 IEM_MC_LOCAL(uint64_t, u64Dst);
8681 IEM_MC_POP_U64(&u64Dst);
8682 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
8683 IEM_MC_ADVANCE_RIP();
8684 IEM_MC_END();
8685 break;
8686 }
8687
8688 return VINF_SUCCESS;
8689}
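
/* Example: per the Intel docs "pop esp" loads the value from the old top
   of stack and increments ESP before that value is written back, so the
   final ESP is simply whatever was on the stack; popping into a local
   first, as above, models this naturally. */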
8690
8691
8692/** Opcode 0x5d. */
8693FNIEMOP_DEF(iemOp_pop_eBP)
8694{
8695 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
8696 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
8697}
8698
8699
8700/** Opcode 0x5e. */
8701FNIEMOP_DEF(iemOp_pop_eSI)
8702{
8703 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
8704 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
8705}
8706
8707
8708/** Opcode 0x5f. */
8709FNIEMOP_DEF(iemOp_pop_eDI)
8710{
8711 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
8712 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
8713}
8714
8715
8716/** Opcode 0x60. */
8717FNIEMOP_DEF(iemOp_pusha)
8718{
8719 IEMOP_MNEMONIC(pusha, "pusha");
8720 IEMOP_HLP_MIN_186();
8721 IEMOP_HLP_NO_64BIT();
8722 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8723 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
8724 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
8725 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
8726}
8727
8728
8729/** Opcode 0x61. */
8730FNIEMOP_DEF(iemOp_popa)
8731{
8732 IEMOP_MNEMONIC(popa, "popa");
8733 IEMOP_HLP_MIN_186();
8734 IEMOP_HLP_NO_64BIT();
8735 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8736 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
8737 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
8738 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
8739}
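
/* Note! pusha stores AX, CX, DX, BX, the original SP, BP, SI and DI in
   that order; popa restores them in reverse but discards the saved SP
   value rather than loading it. */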
8740
8741
8742/** Opcode 0x62. */
8743FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
8744// IEMOP_HLP_MIN_186();
8745
8746
8747/** Opcode 0x63 - non-64-bit modes. */
8748FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
8749{
8750 IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
8751 IEMOP_HLP_MIN_286();
8752 IEMOP_HLP_NO_REAL_OR_V86_MODE();
8753 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8754
8755 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8756 {
8757 /* Register */
8758 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
8759 IEM_MC_BEGIN(3, 0);
8760 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8761 IEM_MC_ARG(uint16_t, u16Src, 1);
8762 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8763
8764 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8765 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
8766 IEM_MC_REF_EFLAGS(pEFlags);
8767 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
8768
8769 IEM_MC_ADVANCE_RIP();
8770 IEM_MC_END();
8771 }
8772 else
8773 {
8774 /* Memory */
8775 IEM_MC_BEGIN(3, 2);
8776 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8777 IEM_MC_ARG(uint16_t, u16Src, 1);
8778 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8779 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8780
8781 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8782 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
8783 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8784 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8785 IEM_MC_FETCH_EFLAGS(EFlags);
8786 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
8787
8788 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8789 IEM_MC_COMMIT_EFLAGS(EFlags);
8790 IEM_MC_ADVANCE_RIP();
8791 IEM_MC_END();
8792 }
8793 return VINF_SUCCESS;
8794
8795}
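
/* Illustrative sketch, not part of the decoder: the architectural ARPL
   operation performed by iemAImpl_arpl above. If the destination selector's
   RPL is below the source's, it is raised and ZF is set; otherwise ZF is
   cleared. The helper name is made up for illustration. */
#if 0
static void iemExampleArpl(uint16_t *pu16Dst, uint16_t u16Src, uint32_t *pfEFlags)
{
    if ((*pu16Dst & X86_SEL_RPL) < (u16Src & X86_SEL_RPL))
    {
        *pu16Dst = (uint16_t)((*pu16Dst & ~X86_SEL_RPL) | (u16Src & X86_SEL_RPL));
        *pfEFlags |= X86_EFL_ZF;
    }
    else
        *pfEFlags &= ~(uint32_t)X86_EFL_ZF;
}
#endif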
8796
8797
8798/** Opcode 0x63.
8799 * @note This is a weird one. It works like a regular move instruction if
8800 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
8801 * @todo This definitely needs a testcase to verify the odd cases. */
8802FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
8803{
8804 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */
8805
8806 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
8807 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8808
8809 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8810 {
8811 /*
8812 * Register to register.
8813 */
8814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8815 IEM_MC_BEGIN(0, 1);
8816 IEM_MC_LOCAL(uint64_t, u64Value);
8817 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8818 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8819 IEM_MC_ADVANCE_RIP();
8820 IEM_MC_END();
8821 }
8822 else
8823 {
8824 /*
8825 * We're loading a register from memory.
8826 */
8827 IEM_MC_BEGIN(0, 2);
8828 IEM_MC_LOCAL(uint64_t, u64Value);
8829 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8830 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8832 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8833 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8834 IEM_MC_ADVANCE_RIP();
8835 IEM_MC_END();
8836 }
8837 return VINF_SUCCESS;
8838}
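
/* Illustrative sketch, not part of the decoder: with REX.W set, MOVSXD
   sign-extends the 32-bit source into the 64-bit destination, which is all
   IEM_MC_FETCH_GREG_U32_SX_U64 above amounts to. Hypothetical helper: */
#if 0
static uint64_t iemExampleMovsxd(uint32_t u32Src)
{
    return (uint64_t)(int64_t)(int32_t)u32Src; /* e.g. 0xffffffff -> 0xffffffffffffffff */
}
#endif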
8839
8840
8841/** Opcode 0x64. */
8842FNIEMOP_DEF(iemOp_seg_FS)
8843{
8844 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
8845 IEMOP_HLP_MIN_386();
8846
8847 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
8848 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
8849
8850 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8851 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8852}
8853
8854
8855/** Opcode 0x65. */
8856FNIEMOP_DEF(iemOp_seg_GS)
8857{
8858 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
8859 IEMOP_HLP_MIN_386();
8860
8861 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
8862 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
8863
8864 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8865 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8866}
8867
8868
8869/** Opcode 0x66. */
8870FNIEMOP_DEF(iemOp_op_size)
8871{
8872 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
8873 IEMOP_HLP_MIN_386();
8874
8875 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
8876 iemRecalEffOpSize(pVCpu);
8877
8878 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8879 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8880}
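
/* Illustrative sketch, not part of the decoder: the effect iemRecalEffOpSize
   has on the effective operand size when only the 0x66 prefix is involved
   (REX.W, which overrides 0x66 in 64-bit mode, is left out here). */
#if 0
static IEMMODE iemExampleEffOpSizeFor66(IEMMODE enmDefOpSize)
{
    switch (enmDefOpSize)
    {
        case IEMMODE_16BIT: return IEMMODE_32BIT; /* 0x66 flips 16 -> 32 */
        case IEMMODE_32BIT: return IEMMODE_16BIT; /* 0x66 flips 32 -> 16 */
        case IEMMODE_64BIT: return IEMMODE_16BIT; /* 64-bit code defaults to 32; 0x66 selects 16 */
        default:            return enmDefOpSize;
    }
}
#endif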
8881
8882
8883/** Opcode 0x67. */
8884FNIEMOP_DEF(iemOp_addr_size)
8885{
8886 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
8887 IEMOP_HLP_MIN_386();
8888
8889 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
8890 switch (pVCpu->iem.s.enmDefAddrMode)
8891 {
8892 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
8893 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
8894 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
8895 default: AssertFailed();
8896 }
8897
8898 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8899 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8900}
8901
8902
8903/** Opcode 0x68. */
8904FNIEMOP_DEF(iemOp_push_Iz)
8905{
8906 IEMOP_MNEMONIC(push_Iz, "push Iz");
8907 IEMOP_HLP_MIN_186();
8908 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8909 switch (pVCpu->iem.s.enmEffOpSize)
8910 {
8911 case IEMMODE_16BIT:
8912 {
8913 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8915 IEM_MC_BEGIN(0,0);
8916 IEM_MC_PUSH_U16(u16Imm);
8917 IEM_MC_ADVANCE_RIP();
8918 IEM_MC_END();
8919 return VINF_SUCCESS;
8920 }
8921
8922 case IEMMODE_32BIT:
8923 {
8924 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8926 IEM_MC_BEGIN(0,0);
8927 IEM_MC_PUSH_U32(u32Imm);
8928 IEM_MC_ADVANCE_RIP();
8929 IEM_MC_END();
8930 return VINF_SUCCESS;
8931 }
8932
8933 case IEMMODE_64BIT:
8934 {
8935 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8937 IEM_MC_BEGIN(0,0);
8938 IEM_MC_PUSH_U64(u64Imm);
8939 IEM_MC_ADVANCE_RIP();
8940 IEM_MC_END();
8941 return VINF_SUCCESS;
8942 }
8943
8944 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8945 }
8946}
8947
8948
8949/** Opcode 0x69. */
8950FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
8951{
8952 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
8953 IEMOP_HLP_MIN_186();
8954 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8955 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
8956
8957 switch (pVCpu->iem.s.enmEffOpSize)
8958 {
8959 case IEMMODE_16BIT:
8960 {
8961 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8962 {
8963 /* register operand */
8964 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8965 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8966
8967 IEM_MC_BEGIN(3, 1);
8968 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8969 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
8970 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8971 IEM_MC_LOCAL(uint16_t, u16Tmp);
8972
8973 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8974 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
8975 IEM_MC_REF_EFLAGS(pEFlags);
8976 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
8977 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
8978
8979 IEM_MC_ADVANCE_RIP();
8980 IEM_MC_END();
8981 }
8982 else
8983 {
8984 /* memory operand */
8985 IEM_MC_BEGIN(3, 2);
8986 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8987 IEM_MC_ARG(uint16_t, u16Src, 1);
8988 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8989 IEM_MC_LOCAL(uint16_t, u16Tmp);
8990 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8991
8992 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
8993 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8994 IEM_MC_ASSIGN(u16Src, u16Imm);
8995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8996 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8997 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
8998 IEM_MC_REF_EFLAGS(pEFlags);
8999 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9000 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9001
9002 IEM_MC_ADVANCE_RIP();
9003 IEM_MC_END();
9004 }
9005 return VINF_SUCCESS;
9006 }
9007
9008 case IEMMODE_32BIT:
9009 {
9010 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9011 {
9012 /* register operand */
9013 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9015
9016 IEM_MC_BEGIN(3, 1);
9017 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9018 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
9019 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9020 IEM_MC_LOCAL(uint32_t, u32Tmp);
9021
9022 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9023 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9024 IEM_MC_REF_EFLAGS(pEFlags);
9025 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9026 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9027
9028 IEM_MC_ADVANCE_RIP();
9029 IEM_MC_END();
9030 }
9031 else
9032 {
9033 /* memory operand */
9034 IEM_MC_BEGIN(3, 2);
9035 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9036 IEM_MC_ARG(uint32_t, u32Src, 1);
9037 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9038 IEM_MC_LOCAL(uint32_t, u32Tmp);
9039 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9040
9041 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9042 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9043 IEM_MC_ASSIGN(u32Src, u32Imm);
9044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9045 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9046 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9047 IEM_MC_REF_EFLAGS(pEFlags);
9048 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9049 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9050
9051 IEM_MC_ADVANCE_RIP();
9052 IEM_MC_END();
9053 }
9054 return VINF_SUCCESS;
9055 }
9056
9057 case IEMMODE_64BIT:
9058 {
9059 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9060 {
9061 /* register operand */
9062 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9064
9065 IEM_MC_BEGIN(3, 1);
9066 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9067 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
9068 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9069 IEM_MC_LOCAL(uint64_t, u64Tmp);
9070
9071 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9072 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9073 IEM_MC_REF_EFLAGS(pEFlags);
9074 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9075 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9076
9077 IEM_MC_ADVANCE_RIP();
9078 IEM_MC_END();
9079 }
9080 else
9081 {
9082 /* memory operand */
9083 IEM_MC_BEGIN(3, 2);
9084 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9085 IEM_MC_ARG(uint64_t, u64Src, 1);
9086 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9087 IEM_MC_LOCAL(uint64_t, u64Tmp);
9088 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9089
9090 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9091 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9092 IEM_MC_ASSIGN(u64Src, u64Imm);
9093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9094 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9095 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9096 IEM_MC_REF_EFLAGS(pEFlags);
9097 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9098 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9099
9100 IEM_MC_ADVANCE_RIP();
9101 IEM_MC_END();
9102 }
9103 return VINF_SUCCESS;
9104 }
9105 }
9106 AssertFailedReturn(VERR_IEM_IPE_9);
9107}
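
/* Illustrative sketch, not part of the decoder: the two-operand IMUL core
   that iemAImpl_imul_two_u16 implements, shown for 16 bits. CF and OF are
   set when the signed result does not fit the destination width; SF, ZF, AF
   and PF are architecturally undefined (matching the verification mask
   above) and are simplified away here. */
#if 0
static uint16_t iemExampleImulTwoU16(uint16_t uDst, uint16_t uSrc, uint32_t *pfEFlags)
{
    int32_t const  iFull   = (int32_t)(int16_t)uDst * (int32_t)(int16_t)uSrc;
    uint16_t const uResult = (uint16_t)iFull;
    if ((int32_t)(int16_t)uResult != iFull)
        *pfEFlags |= X86_EFL_CF | X86_EFL_OF;
    else
        *pfEFlags &= ~(uint32_t)(X86_EFL_CF | X86_EFL_OF);
    return uResult;
}
#endif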
9108
9109
9110/** Opcode 0x6a. */
9111FNIEMOP_DEF(iemOp_push_Ib)
9112{
9113 IEMOP_MNEMONIC(push_Ib, "push Ib");
9114 IEMOP_HLP_MIN_186();
9115 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9116 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9117 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9118
9119 IEM_MC_BEGIN(0,0);
9120 switch (pVCpu->iem.s.enmEffOpSize)
9121 {
9122 case IEMMODE_16BIT:
9123 IEM_MC_PUSH_U16(i8Imm);
9124 break;
9125 case IEMMODE_32BIT:
9126 IEM_MC_PUSH_U32(i8Imm);
9127 break;
9128 case IEMMODE_64BIT:
9129 IEM_MC_PUSH_U64(i8Imm);
9130 break;
9131 }
9132 IEM_MC_ADVANCE_RIP();
9133 IEM_MC_END();
9134 return VINF_SUCCESS;
9135}
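
/* Illustrative sketch, not part of the decoder: opcode 0x6a sign-extends its
   byte immediate to the effective operand size before pushing, hence the
   int8_t immediate feeding IEM_MC_PUSH_U32/U64 above. */
#if 0
static uint32_t iemExamplePushIbValue32(int8_t i8Imm)
{
    return (uint32_t)(int32_t)i8Imm; /* e.g. 6a 80 pushes 0xffffff80 */
}
#endif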
9136
9137
9138/** Opcode 0x6b. */
9139FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
9140{
9141 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
9142 IEMOP_HLP_MIN_186();
9143 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9144 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
9145
9146 switch (pVCpu->iem.s.enmEffOpSize)
9147 {
9148 case IEMMODE_16BIT:
9149 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9150 {
9151 /* register operand */
9152 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9154
9155 IEM_MC_BEGIN(3, 1);
9156 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9157 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
9158 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9159 IEM_MC_LOCAL(uint16_t, u16Tmp);
9160
9161 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9162 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9163 IEM_MC_REF_EFLAGS(pEFlags);
9164 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9165 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9166
9167 IEM_MC_ADVANCE_RIP();
9168 IEM_MC_END();
9169 }
9170 else
9171 {
9172 /* memory operand */
9173 IEM_MC_BEGIN(3, 2);
9174 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9175 IEM_MC_ARG(uint16_t, u16Src, 1);
9176 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9177 IEM_MC_LOCAL(uint16_t, u16Tmp);
9178 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9179
9180 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9181 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
9182 IEM_MC_ASSIGN(u16Src, u16Imm);
9183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9184 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9185 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9186 IEM_MC_REF_EFLAGS(pEFlags);
9187 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9188 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9189
9190 IEM_MC_ADVANCE_RIP();
9191 IEM_MC_END();
9192 }
9193 return VINF_SUCCESS;
9194
9195 case IEMMODE_32BIT:
9196 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9197 {
9198 /* register operand */
9199 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9201
9202 IEM_MC_BEGIN(3, 1);
9203 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9204 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
9205 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9206 IEM_MC_LOCAL(uint32_t, u32Tmp);
9207
9208 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9209 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9210 IEM_MC_REF_EFLAGS(pEFlags);
9211 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9212 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9213
9214 IEM_MC_ADVANCE_RIP();
9215 IEM_MC_END();
9216 }
9217 else
9218 {
9219 /* memory operand */
9220 IEM_MC_BEGIN(3, 2);
9221 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9222 IEM_MC_ARG(uint32_t, u32Src, 1);
9223 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9224 IEM_MC_LOCAL(uint32_t, u32Tmp);
9225 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9226
9227 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9228 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
9229 IEM_MC_ASSIGN(u32Src, u32Imm);
9230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9231 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9232 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9233 IEM_MC_REF_EFLAGS(pEFlags);
9234 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9235 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9236
9237 IEM_MC_ADVANCE_RIP();
9238 IEM_MC_END();
9239 }
9240 return VINF_SUCCESS;
9241
9242 case IEMMODE_64BIT:
9243 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9244 {
9245 /* register operand */
9246 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9248
9249 IEM_MC_BEGIN(3, 1);
9250 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9251 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
9252 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9253 IEM_MC_LOCAL(uint64_t, u64Tmp);
9254
9255 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9256 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9257 IEM_MC_REF_EFLAGS(pEFlags);
9258 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9259 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9260
9261 IEM_MC_ADVANCE_RIP();
9262 IEM_MC_END();
9263 }
9264 else
9265 {
9266 /* memory operand */
9267 IEM_MC_BEGIN(3, 2);
9268 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9269 IEM_MC_ARG(uint64_t, u64Src, 1);
9270 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9271 IEM_MC_LOCAL(uint64_t, u64Tmp);
9272 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9273
9274 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9275 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
9276 IEM_MC_ASSIGN(u64Src, u64Imm);
9277 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9278 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9279 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9280 IEM_MC_REF_EFLAGS(pEFlags);
9281 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9282 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9283
9284 IEM_MC_ADVANCE_RIP();
9285 IEM_MC_END();
9286 }
9287 return VINF_SUCCESS;
9288 }
9289 AssertFailedReturn(VERR_IEM_IPE_8);
9290}
9291
9292
9293/** Opcode 0x6c. */
9294FNIEMOP_DEF(iemOp_insb_Yb_DX)
9295{
9296 IEMOP_HLP_MIN_186();
9297 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9298 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
9299 {
9300 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
9301 switch (pVCpu->iem.s.enmEffAddrMode)
9302 {
9303 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
9304 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
9305 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
9306 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9307 }
9308 }
9309 else
9310 {
9311 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
9312 switch (pVCpu->iem.s.enmEffAddrMode)
9313 {
9314 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
9315 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
9316 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
9317 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9318 }
9319 }
9320}
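
/* Illustrative sketch, not part of the decoder: the architectural effect of
   a REP INSB loop as deferred to the iemCImpl_rep_ins_* workers above,
   ignoring faults, IOPL/TSS permission checks and partial commits. The
   helper and its port-read callback are hypothetical. */
#if 0
static void iemExampleRepInsb(uint8_t *pbDst /* ES:xDI */, uint64_t *pcLeft /* xCX */,
                              bool fDF, uint16_t uPort, uint8_t (*pfnIn)(uint16_t))
{
    while (*pcLeft > 0)
    {
        *pbDst = pfnIn(uPort);  /* read one byte from the DX port */
        pbDst += fDF ? -1 : 1;  /* step xDI per EFLAGS.DF */
        *pcLeft -= 1;           /* decrement the counter */
    }
}
#endif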
9321
9322
9323/** Opcode 0x6d. */
9324FNIEMOP_DEF(iemOp_inswd_Yv_DX)
9325{
9326 IEMOP_HLP_MIN_186();
9327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9328 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
9329 {
9330 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
9331 switch (pVCpu->iem.s.enmEffOpSize)
9332 {
9333 case IEMMODE_16BIT:
9334 switch (pVCpu->iem.s.enmEffAddrMode)
9335 {
9336 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
9337 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
9338 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
9339 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9340 }
9341 break;
9342 case IEMMODE_64BIT:
9343 case IEMMODE_32BIT:
9344 switch (pVCpu->iem.s.enmEffAddrMode)
9345 {
9346 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
9347 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
9348 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
9349 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9350 }
9351 break;
9352 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9353 }
9354 }
9355 else
9356 {
9357 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
9358 switch (pVCpu->iem.s.enmEffOpSize)
9359 {
9360 case IEMMODE_16BIT:
9361 switch (pVCpu->iem.s.enmEffAddrMode)
9362 {
9363 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
9364 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
9365 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
9366 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9367 }
9368 break;
9369 case IEMMODE_64BIT:
9370 case IEMMODE_32BIT:
9371 switch (pVCpu->iem.s.enmEffAddrMode)
9372 {
9373 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
9374 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
9375 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
9376 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9377 }
9378 break;
9379 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9380 }
9381 }
9382}
9383
9384
9385/** Opcode 0x6e. */
9386FNIEMOP_DEF(iemOp_outsb_Yb_DX)
9387{
9388 IEMOP_HLP_MIN_186();
9389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9390 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
9391 {
9392 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
9393 switch (pVCpu->iem.s.enmEffAddrMode)
9394 {
9395 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
9396 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
9397 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
9398 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9399 }
9400 }
9401 else
9402 {
9403 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
9404 switch (pVCpu->iem.s.enmEffAddrMode)
9405 {
9406 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
9407 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
9408 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
9409 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9410 }
9411 }
9412}
9413
9414
9415/** Opcode 0x6f. */
9416FNIEMOP_DEF(iemOp_outswd_Yv_DX)
9417{
9418 IEMOP_HLP_MIN_186();
9419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9420 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
9421 {
9422 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
9423 switch (pVCpu->iem.s.enmEffOpSize)
9424 {
9425 case IEMMODE_16BIT:
9426 switch (pVCpu->iem.s.enmEffAddrMode)
9427 {
9428 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
9429 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
9430 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
9431 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9432 }
9433 break;
9434 case IEMMODE_64BIT:
9435 case IEMMODE_32BIT:
9436 switch (pVCpu->iem.s.enmEffAddrMode)
9437 {
9438 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
9439 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
9440 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
9441 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9442 }
9443 break;
9444 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9445 }
9446 }
9447 else
9448 {
9449 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
9450 switch (pVCpu->iem.s.enmEffOpSize)
9451 {
9452 case IEMMODE_16BIT:
9453 switch (pVCpu->iem.s.enmEffAddrMode)
9454 {
9455 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
9456 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
9457 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
9458 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9459 }
9460 break;
9461 case IEMMODE_64BIT:
9462 case IEMMODE_32BIT:
9463 switch (pVCpu->iem.s.enmEffAddrMode)
9464 {
9465 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
9466 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
9467 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
9468 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9469 }
9470 break;
9471 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9472 }
9473 }
9474}
9475
9476
9477/** Opcode 0x70. */
9478FNIEMOP_DEF(iemOp_jo_Jb)
9479{
9480 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
9481 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9483 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9484
9485 IEM_MC_BEGIN(0, 0);
9486 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
9487 IEM_MC_REL_JMP_S8(i8Imm);
9488 } IEM_MC_ELSE() {
9489 IEM_MC_ADVANCE_RIP();
9490 } IEM_MC_ENDIF();
9491 IEM_MC_END();
9492 return VINF_SUCCESS;
9493}
9494
9495
9496/** Opcode 0x71. */
9497FNIEMOP_DEF(iemOp_jno_Jb)
9498{
9499 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
9500 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9502 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9503
9504 IEM_MC_BEGIN(0, 0);
9505 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
9506 IEM_MC_ADVANCE_RIP();
9507 } IEM_MC_ELSE() {
9508 IEM_MC_REL_JMP_S8(i8Imm);
9509 } IEM_MC_ENDIF();
9510 IEM_MC_END();
9511 return VINF_SUCCESS;
9512}
9513
9514/** Opcode 0x72. */
9515FNIEMOP_DEF(iemOp_jc_Jb)
9516{
9517 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
9518 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9520 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9521
9522 IEM_MC_BEGIN(0, 0);
9523 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
9524 IEM_MC_REL_JMP_S8(i8Imm);
9525 } IEM_MC_ELSE() {
9526 IEM_MC_ADVANCE_RIP();
9527 } IEM_MC_ENDIF();
9528 IEM_MC_END();
9529 return VINF_SUCCESS;
9530}
9531
9532
9533/** Opcode 0x73. */
9534FNIEMOP_DEF(iemOp_jnc_Jb)
9535{
9536 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
9537 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9539 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9540
9541 IEM_MC_BEGIN(0, 0);
9542 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
9543 IEM_MC_ADVANCE_RIP();
9544 } IEM_MC_ELSE() {
9545 IEM_MC_REL_JMP_S8(i8Imm);
9546 } IEM_MC_ENDIF();
9547 IEM_MC_END();
9548 return VINF_SUCCESS;
9549}
9550
9551
9552/** Opcode 0x74. */
9553FNIEMOP_DEF(iemOp_je_Jb)
9554{
9555 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
9556 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9558 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9559
9560 IEM_MC_BEGIN(0, 0);
9561 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
9562 IEM_MC_REL_JMP_S8(i8Imm);
9563 } IEM_MC_ELSE() {
9564 IEM_MC_ADVANCE_RIP();
9565 } IEM_MC_ENDIF();
9566 IEM_MC_END();
9567 return VINF_SUCCESS;
9568}
9569
9570
9571/** Opcode 0x75. */
9572FNIEMOP_DEF(iemOp_jne_Jb)
9573{
9574 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
9575 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9577 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9578
9579 IEM_MC_BEGIN(0, 0);
9580 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
9581 IEM_MC_ADVANCE_RIP();
9582 } IEM_MC_ELSE() {
9583 IEM_MC_REL_JMP_S8(i8Imm);
9584 } IEM_MC_ENDIF();
9585 IEM_MC_END();
9586 return VINF_SUCCESS;
9587}
9588
9589
9590/** Opcode 0x76. */
9591FNIEMOP_DEF(iemOp_jbe_Jb)
9592{
9593 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
9594 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9596 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9597
9598 IEM_MC_BEGIN(0, 0);
9599 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
9600 IEM_MC_REL_JMP_S8(i8Imm);
9601 } IEM_MC_ELSE() {
9602 IEM_MC_ADVANCE_RIP();
9603 } IEM_MC_ENDIF();
9604 IEM_MC_END();
9605 return VINF_SUCCESS;
9606}
9607
9608
9609/** Opcode 0x77. */
9610FNIEMOP_DEF(iemOp_jnbe_Jb)
9611{
9612 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
9613 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9615 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9616
9617 IEM_MC_BEGIN(0, 0);
9618 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
9619 IEM_MC_ADVANCE_RIP();
9620 } IEM_MC_ELSE() {
9621 IEM_MC_REL_JMP_S8(i8Imm);
9622 } IEM_MC_ENDIF();
9623 IEM_MC_END();
9624 return VINF_SUCCESS;
9625}
9626
9627
9628/** Opcode 0x78. */
9629FNIEMOP_DEF(iemOp_js_Jb)
9630{
9631 IEMOP_MNEMONIC(js_Jb, "js Jb");
9632 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9634 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9635
9636 IEM_MC_BEGIN(0, 0);
9637 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
9638 IEM_MC_REL_JMP_S8(i8Imm);
9639 } IEM_MC_ELSE() {
9640 IEM_MC_ADVANCE_RIP();
9641 } IEM_MC_ENDIF();
9642 IEM_MC_END();
9643 return VINF_SUCCESS;
9644}
9645
9646
9647/** Opcode 0x79. */
9648FNIEMOP_DEF(iemOp_jns_Jb)
9649{
9650 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
9651 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9653 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9654
9655 IEM_MC_BEGIN(0, 0);
9656 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
9657 IEM_MC_ADVANCE_RIP();
9658 } IEM_MC_ELSE() {
9659 IEM_MC_REL_JMP_S8(i8Imm);
9660 } IEM_MC_ENDIF();
9661 IEM_MC_END();
9662 return VINF_SUCCESS;
9663}
9664
9665
9666/** Opcode 0x7a. */
9667FNIEMOP_DEF(iemOp_jp_Jb)
9668{
9669 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
9670 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9672 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9673
9674 IEM_MC_BEGIN(0, 0);
9675 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
9676 IEM_MC_REL_JMP_S8(i8Imm);
9677 } IEM_MC_ELSE() {
9678 IEM_MC_ADVANCE_RIP();
9679 } IEM_MC_ENDIF();
9680 IEM_MC_END();
9681 return VINF_SUCCESS;
9682}
9683
9684
9685/** Opcode 0x7b. */
9686FNIEMOP_DEF(iemOp_jnp_Jb)
9687{
9688 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
9689 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9691 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9692
9693 IEM_MC_BEGIN(0, 0);
9694 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
9695 IEM_MC_ADVANCE_RIP();
9696 } IEM_MC_ELSE() {
9697 IEM_MC_REL_JMP_S8(i8Imm);
9698 } IEM_MC_ENDIF();
9699 IEM_MC_END();
9700 return VINF_SUCCESS;
9701}
9702
9703
9704/** Opcode 0x7c. */
9705FNIEMOP_DEF(iemOp_jl_Jb)
9706{
9707 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
9708 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9710 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9711
9712 IEM_MC_BEGIN(0, 0);
9713 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
9714 IEM_MC_REL_JMP_S8(i8Imm);
9715 } IEM_MC_ELSE() {
9716 IEM_MC_ADVANCE_RIP();
9717 } IEM_MC_ENDIF();
9718 IEM_MC_END();
9719 return VINF_SUCCESS;
9720}
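
/* Illustrative sketch, not part of the decoder: the signed "less" condition
   tested by IEM_MC_IF_EFL_BITS_NE above; JL/JNGE is taken exactly when
   SF != OF. Hypothetical helper: */
#if 0
static bool iemExampleIsJlTaken(uint32_t fEFlags)
{
    return RT_BOOL(fEFlags & X86_EFL_SF) != RT_BOOL(fEFlags & X86_EFL_OF);
}
#endif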
9721
9722
9723/** Opcode 0x7d. */
9724FNIEMOP_DEF(iemOp_jnl_Jb)
9725{
9726 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
9727 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9729 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9730
9731 IEM_MC_BEGIN(0, 0);
9732 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
9733 IEM_MC_ADVANCE_RIP();
9734 } IEM_MC_ELSE() {
9735 IEM_MC_REL_JMP_S8(i8Imm);
9736 } IEM_MC_ENDIF();
9737 IEM_MC_END();
9738 return VINF_SUCCESS;
9739}
9740
9741
9742/** Opcode 0x7e. */
9743FNIEMOP_DEF(iemOp_jle_Jb)
9744{
9745 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
9746 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9748 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9749
9750 IEM_MC_BEGIN(0, 0);
9751 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
9752 IEM_MC_REL_JMP_S8(i8Imm);
9753 } IEM_MC_ELSE() {
9754 IEM_MC_ADVANCE_RIP();
9755 } IEM_MC_ENDIF();
9756 IEM_MC_END();
9757 return VINF_SUCCESS;
9758}
9759
9760
9761/** Opcode 0x7f. */
9762FNIEMOP_DEF(iemOp_jnle_Jb)
9763{
9764 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
9765 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9767 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9768
9769 IEM_MC_BEGIN(0, 0);
9770 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
9771 IEM_MC_ADVANCE_RIP();
9772 } IEM_MC_ELSE() {
9773 IEM_MC_REL_JMP_S8(i8Imm);
9774 } IEM_MC_ENDIF();
9775 IEM_MC_END();
9776 return VINF_SUCCESS;
9777}
9778
9779
9780/** Opcode 0x80. */
9781FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
9782{
9783 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9784 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9785 {
9786 case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
9787 case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
9788 case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
9789 case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
9790 case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
9791 case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
9792 case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
9793 case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
9794 }
9795 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
9796
9797 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9798 {
9799 /* register target */
9800 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9802 IEM_MC_BEGIN(3, 0);
9803 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9804 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
9805 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9806
9807 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9808 IEM_MC_REF_EFLAGS(pEFlags);
9809 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
9810
9811 IEM_MC_ADVANCE_RIP();
9812 IEM_MC_END();
9813 }
9814 else
9815 {
9816 /* memory target */
9817 uint32_t fAccess;
9818 if (pImpl->pfnLockedU8)
9819 fAccess = IEM_ACCESS_DATA_RW;
9820 else /* CMP */
9821 fAccess = IEM_ACCESS_DATA_R;
9822 IEM_MC_BEGIN(3, 2);
9823 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9824 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9825 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9826
9827 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9828 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9829 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
9830 if (pImpl->pfnLockedU8)
9831 IEMOP_HLP_DONE_DECODING();
9832 else
9833 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9834
9835 IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9836 IEM_MC_FETCH_EFLAGS(EFlags);
9837 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9838 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
9839 else
9840 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
9841
9842 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
9843 IEM_MC_COMMIT_EFLAGS(EFlags);
9844 IEM_MC_ADVANCE_RIP();
9845 IEM_MC_END();
9846 }
9847 return VINF_SUCCESS;
9848}
9849
9850
9851/** Opcode 0x81. */
9852FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
9853{
9854 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9855 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9856 {
9857 case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
9858 case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
9859 case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
9860 case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
9861 case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
9862 case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
9863 case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
9864 case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
9865 }
9866 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
9867
9868 switch (pVCpu->iem.s.enmEffOpSize)
9869 {
9870 case IEMMODE_16BIT:
9871 {
9872 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9873 {
9874 /* register target */
9875 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9876 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9877 IEM_MC_BEGIN(3, 0);
9878 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9879 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
9880 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9881
9882 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9883 IEM_MC_REF_EFLAGS(pEFlags);
9884 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9885
9886 IEM_MC_ADVANCE_RIP();
9887 IEM_MC_END();
9888 }
9889 else
9890 {
9891 /* memory target */
9892 uint32_t fAccess;
9893 if (pImpl->pfnLockedU16)
9894 fAccess = IEM_ACCESS_DATA_RW;
8895 else /* CMP */
9896 fAccess = IEM_ACCESS_DATA_R;
9897 IEM_MC_BEGIN(3, 2);
9898 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9899 IEM_MC_ARG(uint16_t, u16Src, 1);
9900 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9901 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9902
9903 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
9904 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9905 IEM_MC_ASSIGN(u16Src, u16Imm);
9906 if (pImpl->pfnLockedU16)
9907 IEMOP_HLP_DONE_DECODING();
9908 else
9909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9910 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9911 IEM_MC_FETCH_EFLAGS(EFlags);
9912 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9913 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9914 else
9915 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
9916
9917 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
9918 IEM_MC_COMMIT_EFLAGS(EFlags);
9919 IEM_MC_ADVANCE_RIP();
9920 IEM_MC_END();
9921 }
9922 break;
9923 }
9924
9925 case IEMMODE_32BIT:
9926 {
9927 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9928 {
9929 /* register target */
9930 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9932 IEM_MC_BEGIN(3, 0);
9933 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9934 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
9935 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9936
9937 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9938 IEM_MC_REF_EFLAGS(pEFlags);
9939 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9940 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9941
9942 IEM_MC_ADVANCE_RIP();
9943 IEM_MC_END();
9944 }
9945 else
9946 {
9947 /* memory target */
9948 uint32_t fAccess;
9949 if (pImpl->pfnLockedU32)
9950 fAccess = IEM_ACCESS_DATA_RW;
9951 else /* CMP */
9952 fAccess = IEM_ACCESS_DATA_R;
9953 IEM_MC_BEGIN(3, 2);
9954 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9955 IEM_MC_ARG(uint32_t, u32Src, 1);
9956 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9957 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9958
9959 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9960 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9961 IEM_MC_ASSIGN(u32Src, u32Imm);
9962 if (pImpl->pfnLockedU32)
9963 IEMOP_HLP_DONE_DECODING();
9964 else
9965 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9966 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9967 IEM_MC_FETCH_EFLAGS(EFlags);
9968 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9969 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9970 else
9971 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
9972
9973 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
9974 IEM_MC_COMMIT_EFLAGS(EFlags);
9975 IEM_MC_ADVANCE_RIP();
9976 IEM_MC_END();
9977 }
9978 break;
9979 }
9980
9981 case IEMMODE_64BIT:
9982 {
9983 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9984 {
9985 /* register target */
9986 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9988 IEM_MC_BEGIN(3, 0);
9989 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9990 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
9991 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9992
9993 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9994 IEM_MC_REF_EFLAGS(pEFlags);
9995 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9996
9997 IEM_MC_ADVANCE_RIP();
9998 IEM_MC_END();
9999 }
10000 else
10001 {
10002 /* memory target */
10003 uint32_t fAccess;
10004 if (pImpl->pfnLockedU64)
10005 fAccess = IEM_ACCESS_DATA_RW;
10006 else /* CMP */
10007 fAccess = IEM_ACCESS_DATA_R;
10008 IEM_MC_BEGIN(3, 2);
10009 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10010 IEM_MC_ARG(uint64_t, u64Src, 1);
10011 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10012 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10013
10014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
10015 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10016 if (pImpl->pfnLockedU64)
10017 IEMOP_HLP_DONE_DECODING();
10018 else
10019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10020 IEM_MC_ASSIGN(u64Src, u64Imm);
10021 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10022 IEM_MC_FETCH_EFLAGS(EFlags);
10023 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10024 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10025 else
10026 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
10027
10028 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
10029 IEM_MC_COMMIT_EFLAGS(EFlags);
10030 IEM_MC_ADVANCE_RIP();
10031 IEM_MC_END();
10032 }
10033 break;
10034 }
10035 }
10036 return VINF_SUCCESS;
10037}
10038
10039
10040/** Opcode 0x82. */
10041FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
10042{
10043 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
10044 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
10045}
10046
10047
10048/** Opcode 0x83. */
10049FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
10050{
10051 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10052 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10053 {
10054 case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
10055 case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
10056 case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
10057 case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
10058 case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
10059 case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
10060 case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
10061 case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
10062 }
10063 /* Note! The OR, AND and XOR forms seem to be present on CPUs prior to
10064 the 386, even though they are absent from the Intel reference manuals
10065 and some 3rd party opcode listings. */
10066 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
10067
10068 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10069 {
10070 /*
10071 * Register target
10072 */
10073 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10074 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10075 switch (pVCpu->iem.s.enmEffOpSize)
10076 {
10077 case IEMMODE_16BIT:
10078 {
10079 IEM_MC_BEGIN(3, 0);
10080 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10081 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
10082 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10083
10084 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10085 IEM_MC_REF_EFLAGS(pEFlags);
10086 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10087
10088 IEM_MC_ADVANCE_RIP();
10089 IEM_MC_END();
10090 break;
10091 }
10092
10093 case IEMMODE_32BIT:
10094 {
10095 IEM_MC_BEGIN(3, 0);
10096 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10097 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
10098 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10099
10100 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10101 IEM_MC_REF_EFLAGS(pEFlags);
10102 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10103 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10104
10105 IEM_MC_ADVANCE_RIP();
10106 IEM_MC_END();
10107 break;
10108 }
10109
10110 case IEMMODE_64BIT:
10111 {
10112 IEM_MC_BEGIN(3, 0);
10113 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10114 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
10115 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10116
10117 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10118 IEM_MC_REF_EFLAGS(pEFlags);
10119 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10120
10121 IEM_MC_ADVANCE_RIP();
10122 IEM_MC_END();
10123 break;
10124 }
10125 }
10126 }
10127 else
10128 {
10129 /*
10130 * Memory target.
10131 */
10132 uint32_t fAccess;
10133 if (pImpl->pfnLockedU16)
10134 fAccess = IEM_ACCESS_DATA_RW;
10135 else /* CMP */
10136 fAccess = IEM_ACCESS_DATA_R;
10137
10138 switch (pVCpu->iem.s.enmEffOpSize)
10139 {
10140 case IEMMODE_16BIT:
10141 {
10142 IEM_MC_BEGIN(3, 2);
10143 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10144 IEM_MC_ARG(uint16_t, u16Src, 1);
10145 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10146 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10147
10148 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10149 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10150 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
10151 if (pImpl->pfnLockedU16)
10152 IEMOP_HLP_DONE_DECODING();
10153 else
10154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10155 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10156 IEM_MC_FETCH_EFLAGS(EFlags);
10157 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10158 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10159 else
10160 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
10161
10162 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
10163 IEM_MC_COMMIT_EFLAGS(EFlags);
10164 IEM_MC_ADVANCE_RIP();
10165 IEM_MC_END();
10166 break;
10167 }
10168
10169 case IEMMODE_32BIT:
10170 {
10171 IEM_MC_BEGIN(3, 2);
10172 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10173 IEM_MC_ARG(uint32_t, u32Src, 1);
10174 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10175 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10176
10177 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10178 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10179 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
10180 if (pImpl->pfnLockedU32)
10181 IEMOP_HLP_DONE_DECODING();
10182 else
10183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10184 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10185 IEM_MC_FETCH_EFLAGS(EFlags);
10186 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10187 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10188 else
10189 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
10190
10191 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
10192 IEM_MC_COMMIT_EFLAGS(EFlags);
10193 IEM_MC_ADVANCE_RIP();
10194 IEM_MC_END();
10195 break;
10196 }
10197
10198 case IEMMODE_64BIT:
10199 {
10200 IEM_MC_BEGIN(3, 2);
10201 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10202 IEM_MC_ARG(uint64_t, u64Src, 1);
10203 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10204 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10205
10206 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10207 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10208 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
10209 if (pImpl->pfnLockedU64)
10210 IEMOP_HLP_DONE_DECODING();
10211 else
10212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10213 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10214 IEM_MC_FETCH_EFLAGS(EFlags);
10215 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10216 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10217 else
10218 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
10219
10220 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
10221 IEM_MC_COMMIT_EFLAGS(EFlags);
10222 IEM_MC_ADVANCE_RIP();
10223 IEM_MC_END();
10224 break;
10225 }
10226 }
10227 }
10228 return VINF_SUCCESS;
10229}
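
/* Illustrative sketch, not part of the decoder: opcode 0x83 sign-extends its
   byte immediate to the operand size, which is what the (int8_t)u8Imm casts
   above achieve; e.g. "83 /0 ib" with ib=0xff adds 0xffffffff, not 0xff. */
#if 0
static uint32_t iemExampleGrp1IbToU32(uint8_t u8Imm)
{
    return (uint32_t)(int32_t)(int8_t)u8Imm;
}
#endif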
10230
10231
10232/** Opcode 0x84. */
10233FNIEMOP_DEF(iemOp_test_Eb_Gb)
10234{
10235 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
10236 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
10237 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
10238}
10239
10240
10241/** Opcode 0x85. */
10242FNIEMOP_DEF(iemOp_test_Ev_Gv)
10243{
10244 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
10245 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
10246 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
10247}
10248
10249
10250/** Opcode 0x86. */
10251FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
10252{
10253 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10254 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
10255
10256 /*
10257 * If rm is denoting a register, no more instruction bytes.
10258 */
10259 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10260 {
10261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10262
10263 IEM_MC_BEGIN(0, 2);
10264 IEM_MC_LOCAL(uint8_t, uTmp1);
10265 IEM_MC_LOCAL(uint8_t, uTmp2);
10266
10267 IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10268 IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10269 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
10270 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
10271
10272 IEM_MC_ADVANCE_RIP();
10273 IEM_MC_END();
10274 }
10275 else
10276 {
10277 /*
10278 * We're accessing memory.
10279 */
10280/** @todo the register must be committed separately! */
10281 IEM_MC_BEGIN(2, 2);
10282 IEM_MC_ARG(uint8_t *, pu8Mem, 0);
10283 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
10284 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10285
10286 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10287 IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10288 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10289 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
10290 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);
10291
10292 IEM_MC_ADVANCE_RIP();
10293 IEM_MC_END();
10294 }
10295 return VINF_SUCCESS;
10296}
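
/* Illustrative sketch, not part of the decoder: the swap performed by
   iemAImpl_xchg_u8 above. Note that XCHG with a memory operand is implicitly
   locked on real hardware, no LOCK prefix required. */
#if 0
static void iemExampleXchgU8(uint8_t *pu8Mem, uint8_t *pu8Reg)
{
    uint8_t const u8Tmp = *pu8Mem;
    *pu8Mem = *pu8Reg;
    *pu8Reg = u8Tmp;
}
#endif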
10297
10298
10299/** Opcode 0x87. */
10300FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
10301{
10302 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
10303 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10304
10305 /*
10306 * If rm is denoting a register, no more instruction bytes.
10307 */
10308 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10309 {
10310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10311
10312 switch (pVCpu->iem.s.enmEffOpSize)
10313 {
10314 case IEMMODE_16BIT:
10315 IEM_MC_BEGIN(0, 2);
10316 IEM_MC_LOCAL(uint16_t, uTmp1);
10317 IEM_MC_LOCAL(uint16_t, uTmp2);
10318
10319 IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10320 IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10321 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
10322 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
10323
10324 IEM_MC_ADVANCE_RIP();
10325 IEM_MC_END();
10326 return VINF_SUCCESS;
10327
10328 case IEMMODE_32BIT:
10329 IEM_MC_BEGIN(0, 2);
10330 IEM_MC_LOCAL(uint32_t, uTmp1);
10331 IEM_MC_LOCAL(uint32_t, uTmp2);
10332
10333 IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10334 IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10335 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
10336 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
10337
10338 IEM_MC_ADVANCE_RIP();
10339 IEM_MC_END();
10340 return VINF_SUCCESS;
10341
10342 case IEMMODE_64BIT:
10343 IEM_MC_BEGIN(0, 2);
10344 IEM_MC_LOCAL(uint64_t, uTmp1);
10345 IEM_MC_LOCAL(uint64_t, uTmp2);
10346
10347 IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10348 IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10349 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
10350 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
10351
10352 IEM_MC_ADVANCE_RIP();
10353 IEM_MC_END();
10354 return VINF_SUCCESS;
10355
10356 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10357 }
10358 }
10359 else
10360 {
10361 /*
10362 * We're accessing memory.
10363 */
10364 switch (pVCpu->iem.s.enmEffOpSize)
10365 {
10366/** @todo the register must be committed separately! */
10367 case IEMMODE_16BIT:
10368 IEM_MC_BEGIN(2, 2);
10369 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
10370 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
10371 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10372
10373 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10374 IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10375 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10376 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
10377 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
10378
10379 IEM_MC_ADVANCE_RIP();
10380 IEM_MC_END();
10381 return VINF_SUCCESS;
10382
10383 case IEMMODE_32BIT:
10384 IEM_MC_BEGIN(2, 2);
10385 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
10386 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
10387 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10388
10389 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10390 IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10391 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10392 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
10393 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
10394
10395 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
10396 IEM_MC_ADVANCE_RIP();
10397 IEM_MC_END();
10398 return VINF_SUCCESS;
10399
10400 case IEMMODE_64BIT:
10401 IEM_MC_BEGIN(2, 2);
10402 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
10403 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
10404 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10405
10406 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10407 IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10408 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10409 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
10410 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
10411
10412 IEM_MC_ADVANCE_RIP();
10413 IEM_MC_END();
10414 return VINF_SUCCESS;
10415
10416 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10417 }
10418 }
10419}
10420
10421
10422/** Opcode 0x88. */
10423FNIEMOP_DEF(iemOp_mov_Eb_Gb)
10424{
10425 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
10426
10427 uint8_t bRm;
10428 IEM_OPCODE_GET_NEXT_U8(&bRm);
10429
10430 /*
10431 * If rm is denoting a register, no more instruction bytes.
10432 */
10433 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10434 {
10435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10436 IEM_MC_BEGIN(0, 1);
10437 IEM_MC_LOCAL(uint8_t, u8Value);
10438 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10439 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
10440 IEM_MC_ADVANCE_RIP();
10441 IEM_MC_END();
10442 }
10443 else
10444 {
10445 /*
10446 * We're writing a register to memory.
10447 */
10448 IEM_MC_BEGIN(0, 2);
10449 IEM_MC_LOCAL(uint8_t, u8Value);
10450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10451 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10453 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10454 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
10455 IEM_MC_ADVANCE_RIP();
10456 IEM_MC_END();
10457 }
10458 return VINF_SUCCESS;
10459
10460}
10461
10462
10463/** Opcode 0x89. */
10464FNIEMOP_DEF(iemOp_mov_Ev_Gv)
10465{
10466 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
10467
10468 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10469
10470 /*
10471 * If rm is denoting a register, no more instruction bytes.
10472 */
10473 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10474 {
10475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10476 switch (pVCpu->iem.s.enmEffOpSize)
10477 {
10478 case IEMMODE_16BIT:
10479 IEM_MC_BEGIN(0, 1);
10480 IEM_MC_LOCAL(uint16_t, u16Value);
10481 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10482 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
10483 IEM_MC_ADVANCE_RIP();
10484 IEM_MC_END();
10485 break;
10486
10487 case IEMMODE_32BIT:
10488 IEM_MC_BEGIN(0, 1);
10489 IEM_MC_LOCAL(uint32_t, u32Value);
10490 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10491 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
10492 IEM_MC_ADVANCE_RIP();
10493 IEM_MC_END();
10494 break;
10495
10496 case IEMMODE_64BIT:
10497 IEM_MC_BEGIN(0, 1);
10498 IEM_MC_LOCAL(uint64_t, u64Value);
10499 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10500 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
10501 IEM_MC_ADVANCE_RIP();
10502 IEM_MC_END();
10503 break;
10504 }
10505 }
10506 else
10507 {
10508 /*
10509 * We're writing a register to memory.
10510 */
10511 switch (pVCpu->iem.s.enmEffOpSize)
10512 {
10513 case IEMMODE_16BIT:
10514 IEM_MC_BEGIN(0, 2);
10515 IEM_MC_LOCAL(uint16_t, u16Value);
10516 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10517 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10519 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10520 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
10521 IEM_MC_ADVANCE_RIP();
10522 IEM_MC_END();
10523 break;
10524
10525 case IEMMODE_32BIT:
10526 IEM_MC_BEGIN(0, 2);
10527 IEM_MC_LOCAL(uint32_t, u32Value);
10528 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10529 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10531 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10532 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
10533 IEM_MC_ADVANCE_RIP();
10534 IEM_MC_END();
10535 break;
10536
10537 case IEMMODE_64BIT:
10538 IEM_MC_BEGIN(0, 2);
10539 IEM_MC_LOCAL(uint64_t, u64Value);
10540 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10541 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10543 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10544 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
10545 IEM_MC_ADVANCE_RIP();
10546 IEM_MC_END();
10547 break;
10548 }
10549 }
10550 return VINF_SUCCESS;
10551}
10552
10553
10554/** Opcode 0x8a. */
10555FNIEMOP_DEF(iemOp_mov_Gb_Eb)
10556{
10557 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
10558
10559 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10560
10561 /*
10562 * If rm is denoting a register, no more instruction bytes.
10563 */
10564 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10565 {
10566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10567 IEM_MC_BEGIN(0, 1);
10568 IEM_MC_LOCAL(uint8_t, u8Value);
10569 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10570 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
10571 IEM_MC_ADVANCE_RIP();
10572 IEM_MC_END();
10573 }
10574 else
10575 {
10576 /*
10577 * We're loading a register from memory.
10578 */
10579 IEM_MC_BEGIN(0, 2);
10580 IEM_MC_LOCAL(uint8_t, u8Value);
10581 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10582 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10584 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10585 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
10586 IEM_MC_ADVANCE_RIP();
10587 IEM_MC_END();
10588 }
10589 return VINF_SUCCESS;
10590}
10591
10592
10593/** Opcode 0x8b. */
10594FNIEMOP_DEF(iemOp_mov_Gv_Ev)
10595{
10596 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
10597
10598 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10599
10600 /*
10601 * If rm is denoting a register, no more instruction bytes.
10602 */
10603 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10604 {
10605 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10606 switch (pVCpu->iem.s.enmEffOpSize)
10607 {
10608 case IEMMODE_16BIT:
10609 IEM_MC_BEGIN(0, 1);
10610 IEM_MC_LOCAL(uint16_t, u16Value);
10611 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10612 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
10613 IEM_MC_ADVANCE_RIP();
10614 IEM_MC_END();
10615 break;
10616
10617 case IEMMODE_32BIT:
10618 IEM_MC_BEGIN(0, 1);
10619 IEM_MC_LOCAL(uint32_t, u32Value);
10620 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10621 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
10622 IEM_MC_ADVANCE_RIP();
10623 IEM_MC_END();
10624 break;
10625
10626 case IEMMODE_64BIT:
10627 IEM_MC_BEGIN(0, 1);
10628 IEM_MC_LOCAL(uint64_t, u64Value);
10629 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10630 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
10631 IEM_MC_ADVANCE_RIP();
10632 IEM_MC_END();
10633 break;
10634 }
10635 }
10636 else
10637 {
10638 /*
10639 * We're loading a register from memory.
10640 */
10641 switch (pVCpu->iem.s.enmEffOpSize)
10642 {
10643 case IEMMODE_16BIT:
10644 IEM_MC_BEGIN(0, 2);
10645 IEM_MC_LOCAL(uint16_t, u16Value);
10646 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10647 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10649 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10650 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
10651 IEM_MC_ADVANCE_RIP();
10652 IEM_MC_END();
10653 break;
10654
10655 case IEMMODE_32BIT:
10656 IEM_MC_BEGIN(0, 2);
10657 IEM_MC_LOCAL(uint32_t, u32Value);
10658 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10659 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10661 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10662 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
10663 IEM_MC_ADVANCE_RIP();
10664 IEM_MC_END();
10665 break;
10666
10667 case IEMMODE_64BIT:
10668 IEM_MC_BEGIN(0, 2);
10669 IEM_MC_LOCAL(uint64_t, u64Value);
10670 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10671 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10673 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10674 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
10675 IEM_MC_ADVANCE_RIP();
10676 IEM_MC_END();
10677 break;
10678 }
10679 }
10680 return VINF_SUCCESS;
10681}
10682
10683
10684/** Opcode 0x63. */
10685FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
10686{
10687 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
10688 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
10689 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10690 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
10691 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
10692}
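
/*
 * Decode illustration for the dispatcher above (not emulation code, byte
 * patterns hand-checked):
 *
 *      16/32-bit mode:         63 c8       -> arpl   ax, cx
 *      64-bit mode, REX.W:     48 63 c8    -> movsxd rcx, eax
 *      64-bit mode, no REX.W:  63 c8       -> handled as mov ecx, eax
 */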
10693
10694
10695/** Opcode 0x8c. */
10696FNIEMOP_DEF(iemOp_mov_Ev_Sw)
10697{
10698 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
10699
10700 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10701
10702 /*
10703 * Check that the source segment register exists. The REX.R prefix is ignored.
10704 */
10705 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
10706 if ( iSegReg > X86_SREG_GS)
10707 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10708
10709 /*
10710 * If rm is denoting a register, no more instruction bytes.
10711 * In that case, the operand size is respected and the upper bits are
10712 * cleared (starting with some Pentium models).
10713 */
10714 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10715 {
10716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10717 switch (pVCpu->iem.s.enmEffOpSize)
10718 {
10719 case IEMMODE_16BIT:
10720 IEM_MC_BEGIN(0, 1);
10721 IEM_MC_LOCAL(uint16_t, u16Value);
10722 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
10723 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
10724 IEM_MC_ADVANCE_RIP();
10725 IEM_MC_END();
10726 break;
10727
10728 case IEMMODE_32BIT:
10729 IEM_MC_BEGIN(0, 1);
10730 IEM_MC_LOCAL(uint32_t, u32Value);
10731 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
10732 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
10733 IEM_MC_ADVANCE_RIP();
10734 IEM_MC_END();
10735 break;
10736
10737 case IEMMODE_64BIT:
10738 IEM_MC_BEGIN(0, 1);
10739 IEM_MC_LOCAL(uint64_t, u64Value);
10740 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
10741 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
10742 IEM_MC_ADVANCE_RIP();
10743 IEM_MC_END();
10744 break;
10745 }
10746 }
10747 else
10748 {
10749 /*
10750 * We're saving the register to memory. The access is word sized
10751 * regardless of operand size prefixes.
10752 */
10753#if 0 /* not necessary */
10754 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
10755#endif
10756 IEM_MC_BEGIN(0, 2);
10757 IEM_MC_LOCAL(uint16_t, u16Value);
10758 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10759 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10760 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10761 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
10762 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
10763 IEM_MC_ADVANCE_RIP();
10764 IEM_MC_END();
10765 }
10766 return VINF_SUCCESS;
10767}
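
/*
 * Rough illustration of the two 0x8c paths above (not emulation code):
 *
 *      8c d8       mov eax, ds     ; register form: width follows the
 *                                  ; operand size, upper bits get cleared
 *      8c 18       mov [eax], ds   ; memory form: a 16-bit store no matter
 *                                  ; what the operand size prefix says
 */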
10768
10769
10770
10771
10772/** Opcode 0x8d. */
10773FNIEMOP_DEF(iemOp_lea_Gv_M)
10774{
10775 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
10776 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10777 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10778 return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */
10779
10780 switch (pVCpu->iem.s.enmEffOpSize)
10781 {
10782 case IEMMODE_16BIT:
10783 IEM_MC_BEGIN(0, 2);
10784 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10785 IEM_MC_LOCAL(uint16_t, u16Cast);
10786 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10788 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
10789 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
10790 IEM_MC_ADVANCE_RIP();
10791 IEM_MC_END();
10792 return VINF_SUCCESS;
10793
10794 case IEMMODE_32BIT:
10795 IEM_MC_BEGIN(0, 2);
10796 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10797 IEM_MC_LOCAL(uint32_t, u32Cast);
10798 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10800 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
10801 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
10802 IEM_MC_ADVANCE_RIP();
10803 IEM_MC_END();
10804 return VINF_SUCCESS;
10805
10806 case IEMMODE_64BIT:
10807 IEM_MC_BEGIN(0, 1);
10808 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10809 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10811 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
10812 IEM_MC_ADVANCE_RIP();
10813 IEM_MC_END();
10814 return VINF_SUCCESS;
10815 }
10816 AssertFailedReturn(VERR_IEM_IPE_7);
10817}
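
/*
 * Quick sketch of what the cases above compute (illustration only): LEA
 * stores the effective address itself, touches no memory, and truncates
 * the result to the operand size:
 *
 *      8d 04 08        lea eax, [eax+ecx]      ; eax = eax + ecx
 *      67 8d 00        lea eax, [bx+si]        ; 16-bit addressing variant
 */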
10818
10819
10820/** Opcode 0x8e. */
10821FNIEMOP_DEF(iemOp_mov_Sw_Ev)
10822{
10823 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
10824
10825 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10826
10827 /*
10828 * The practical operand size is 16-bit.
10829 */
10830#if 0 /* not necessary */
10831 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
10832#endif
10833
10834 /*
10835 * Check that the destination register exists and can be used with this
10836 * instruction. The REX.R prefix is ignored.
10837 */
10838 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
10839 if ( iSegReg == X86_SREG_CS
10840 || iSegReg > X86_SREG_GS)
10841 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10842
10843 /*
10844 * If rm is denoting a register, no more instruction bytes.
10845 */
10846 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10847 {
10848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10849 IEM_MC_BEGIN(2, 0);
10850 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
10851 IEM_MC_ARG(uint16_t, u16Value, 1);
10852 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10853 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
10854 IEM_MC_END();
10855 }
10856 else
10857 {
10858 /*
10859 * We're loading the register from memory. The access is word sized
10860 * regardless of operand size prefixes.
10861 */
10862 IEM_MC_BEGIN(2, 1);
10863 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
10864 IEM_MC_ARG(uint16_t, u16Value, 1);
10865 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10866 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10868 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10869 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
10870 IEM_MC_END();
10871 }
10872 return VINF_SUCCESS;
10873}
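
/*
 * Illustration of the 0x8e rules enforced above (not emulation code):
 *
 *      8e d8       mov ds, ax      ; OK, ends up in iemCImpl_load_SReg
 *      8e c8       mov cs, ax      ; #UD - CS cannot be loaded this way
 *
 * The load is always 16 bits wide, so operand size prefixes make no
 * practical difference here.
 */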
10874
10875
10876/** Opcode 0x8f /0. */
10877FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
10878{
10879 /* This bugger is rather annoying as it requires rSP to be updated before
10880 doing the effective address calculations. Will eventually require a
10881 split between the R/M+SIB decoding and the effective address
10882 calculation - which is something that is required for any attempt at
10883 reusing this code for a recompiler. It may also be good to have if we
10884 need to delay #UD exception caused by invalid lock prefixes.
10885
10886 For now, we'll do a mostly safe interpreter-only implementation here. */
10887 /** @todo What's the deal with the 'reg' field and pop Ev? Ignoring it for
10888 * now until tests show it's checked. */
10889 IEMOP_MNEMONIC(pop_Ev, "pop Ev");
10890
10891 /* Register access is relatively easy and can share code. */
10892 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10893 return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10894
10895 /*
10896 * Memory target.
10897 *
10898 * Intel says that RSP is incremented before it's used in any effective
10899 * address calculations. This means some serious extra annoyance here since
10900 * we decode and calculate the effective address in one step and like to
10901 * delay committing registers till everything is done.
10902 *
10903 * So, we'll decode and calculate the effective address twice. This will
10904 * require some recoding if turned into a recompiler.
10905 */
10906 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */
10907
10908#ifndef TST_IEM_CHECK_MC
10909 /* Calc effective address with modified ESP. */
10910/** @todo testcase */
10911 PCPUMCTX pCtx = IEM_GET_CTX(pVCpu);
10912 RTGCPTR GCPtrEff;
10913 VBOXSTRICTRC rcStrict;
10914 switch (pVCpu->iem.s.enmEffOpSize)
10915 {
10916 case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
10917 case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
10918 case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
10919 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10920 }
10921 if (rcStrict != VINF_SUCCESS)
10922 return rcStrict;
10923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10924
10925 /* Perform the operation - this should be CImpl. */
10926 RTUINT64U TmpRsp;
10927 TmpRsp.u = pCtx->rsp;
10928 switch (pVCpu->iem.s.enmEffOpSize)
10929 {
10930 case IEMMODE_16BIT:
10931 {
10932 uint16_t u16Value;
10933 rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
10934 if (rcStrict == VINF_SUCCESS)
10935 rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
10936 break;
10937 }
10938
10939 case IEMMODE_32BIT:
10940 {
10941 uint32_t u32Value;
10942 rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
10943 if (rcStrict == VINF_SUCCESS)
10944 rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
10945 break;
10946 }
10947
10948 case IEMMODE_64BIT:
10949 {
10950 uint64_t u64Value;
10951 rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
10952 if (rcStrict == VINF_SUCCESS)
10953 rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
10954 break;
10955 }
10956
10957 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10958 }
10959 if (rcStrict == VINF_SUCCESS)
10960 {
10961 pCtx->rsp = TmpRsp.u;
10962 iemRegUpdateRipAndClearRF(pVCpu);
10963 }
10964 return rcStrict;
10965
10966#else
10967 return VERR_IEM_IPE_2;
10968#endif
10969}
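
/*
 * Worked example of the rSP-before-EA rule handled above (illustration,
 * not emulation code): in 64-bit mode with RSP=0x1000,
 *
 *      8f 04 24        pop qword [rsp]
 *
 * pops the value at 0x1000, bumps RSP to 0x1008 and only then forms the
 * destination address, so the store goes to 0x1008 rather than 0x1000.
 * That is why the effective address is calculated with the adjusted rSP.
 */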
10970
10971
10972/** Opcode 0x8f. */
10973FNIEMOP_DEF(iemOp_Grp1A)
10974{
10975 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10976 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
10977 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
10978
10979 /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
10980 /** @todo XOP decoding. */
10981 IEMOP_MNEMONIC(xop_amd, "3-byte-xop");
10982 return IEMOP_RAISE_INVALID_OPCODE();
10983}
10984
10985
10986/**
10987 * Common 'xchg reg,rAX' helper.
10988 */
10989FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
10990{
10991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10992
10993 iReg |= pVCpu->iem.s.uRexB;
10994 switch (pVCpu->iem.s.enmEffOpSize)
10995 {
10996 case IEMMODE_16BIT:
10997 IEM_MC_BEGIN(0, 2);
10998 IEM_MC_LOCAL(uint16_t, u16Tmp1);
10999 IEM_MC_LOCAL(uint16_t, u16Tmp2);
11000 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
11001 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
11002 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
11003 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
11004 IEM_MC_ADVANCE_RIP();
11005 IEM_MC_END();
11006 return VINF_SUCCESS;
11007
11008 case IEMMODE_32BIT:
11009 IEM_MC_BEGIN(0, 2);
11010 IEM_MC_LOCAL(uint32_t, u32Tmp1);
11011 IEM_MC_LOCAL(uint32_t, u32Tmp2);
11012 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
11013 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
11014 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
11015 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
11016 IEM_MC_ADVANCE_RIP();
11017 IEM_MC_END();
11018 return VINF_SUCCESS;
11019
11020 case IEMMODE_64BIT:
11021 IEM_MC_BEGIN(0, 2);
11022 IEM_MC_LOCAL(uint64_t, u64Tmp1);
11023 IEM_MC_LOCAL(uint64_t, u64Tmp2);
11024 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
11025 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
11026 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
11027 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
11028 IEM_MC_ADVANCE_RIP();
11029 IEM_MC_END();
11030 return VINF_SUCCESS;
11031
11032 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11033 }
11034}
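
/*
 * The 0x91..0x97 bytes (and 0x90 with REX.B) all funnel into the helper
 * above. A rough C equivalent of one 32-bit exchange (illustration only):
 *
 *      uint32_t uTmp1 = uReg, uTmp2 = uEax;
 *      uEax = uTmp1;
 *      uReg = uTmp2;
 *
 * Both values are read before either register is written, so the two
 * stores can happen in any order.
 */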
11035
11036
11037/** Opcode 0x90. */
11038FNIEMOP_DEF(iemOp_nop)
11039{
11040 /* R8/R8D and RAX/EAX can be exchanged. */
11041 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
11042 {
11043 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
11044 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
11045 }
11046
11047 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
11048 IEMOP_MNEMONIC(pause, "pause");
11049 else
11050 IEMOP_MNEMONIC(nop, "nop");
11051 IEM_MC_BEGIN(0, 0);
11052 IEM_MC_ADVANCE_RIP();
11053 IEM_MC_END();
11054 return VINF_SUCCESS;
11055}
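
/*
 * Encoding notes for the three personalities of 0x90 (illustration):
 *
 *      90          nop
 *      f3 90       pause           ; REPZ prefix turns it into PAUSE
 *      41 90       xchg r8, rax    ; REX.B makes it a real exchange
 */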
11056
11057
11058/** Opcode 0x91. */
11059FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
11060{
11061 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
11062 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
11063}
11064
11065
11066/** Opcode 0x92. */
11067FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
11068{
11069 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
11070 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
11071}
11072
11073
11074/** Opcode 0x93. */
11075FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
11076{
11077 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
11078 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
11079}
11080
11081
11082/** Opcode 0x94. */
11083FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
11084{
11085 IEMOP_MNEMONIC(xchg_rSP_rAX, "xchg rSP,rAX");
11086 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
11087}
11088
11089
11090/** Opcode 0x95. */
11091FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
11092{
11093 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
11094 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
11095}
11096
11097
11098/** Opcode 0x96. */
11099FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
11100{
11101 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
11102 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
11103}
11104
11105
11106/** Opcode 0x97. */
11107FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
11108{
11109 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
11110 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
11111}
11112
11113
11114/** Opcode 0x98. */
11115FNIEMOP_DEF(iemOp_cbw)
11116{
11117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11118 switch (pVCpu->iem.s.enmEffOpSize)
11119 {
11120 case IEMMODE_16BIT:
11121 IEMOP_MNEMONIC(cbw, "cbw");
11122 IEM_MC_BEGIN(0, 1);
11123 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
11124 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
11125 } IEM_MC_ELSE() {
11126 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
11127 } IEM_MC_ENDIF();
11128 IEM_MC_ADVANCE_RIP();
11129 IEM_MC_END();
11130 return VINF_SUCCESS;
11131
11132 case IEMMODE_32BIT:
11133 IEMOP_MNEMONIC(cwde, "cwde");
11134 IEM_MC_BEGIN(0, 1);
11135 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
11136 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
11137 } IEM_MC_ELSE() {
11138 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
11139 } IEM_MC_ENDIF();
11140 IEM_MC_ADVANCE_RIP();
11141 IEM_MC_END();
11142 return VINF_SUCCESS;
11143
11144 case IEMMODE_64BIT:
11145 IEMOP_MNEMONIC(cdqe, "cdqe");
11146 IEM_MC_BEGIN(0, 1);
11147 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
11148 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
11149 } IEM_MC_ELSE() {
11150 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
11151 } IEM_MC_ENDIF();
11152 IEM_MC_ADVANCE_RIP();
11153 IEM_MC_END();
11154 return VINF_SUCCESS;
11155
11156 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11157 }
11158}
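
/*
 * What the bit-test pattern above amounts to, in plain C (illustration
 * only):
 *
 *      ax  = (uint16_t)(int16_t)(int8_t)al;    // cbw
 *      eax = (uint32_t)(int32_t)(int16_t)ax;   // cwde
 *      rax = (uint64_t)(int64_t)(int32_t)eax;  // cdqe
 *
 * Testing the sign bit and OR-ing/AND-ing in the upper half is simply a
 * branchy way of sign extending within rAX.
 */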
11159
11160
11161/** Opcode 0x99. */
11162FNIEMOP_DEF(iemOp_cwd)
11163{
11164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11165 switch (pVCpu->iem.s.enmEffOpSize)
11166 {
11167 case IEMMODE_16BIT:
11168 IEMOP_MNEMONIC(cwd, "cwd");
11169 IEM_MC_BEGIN(0, 1);
11170 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
11171 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
11172 } IEM_MC_ELSE() {
11173 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
11174 } IEM_MC_ENDIF();
11175 IEM_MC_ADVANCE_RIP();
11176 IEM_MC_END();
11177 return VINF_SUCCESS;
11178
11179 case IEMMODE_32BIT:
11180 IEMOP_MNEMONIC(cdq, "cdq");
11181 IEM_MC_BEGIN(0, 1);
11182 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
11183 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
11184 } IEM_MC_ELSE() {
11185 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
11186 } IEM_MC_ENDIF();
11187 IEM_MC_ADVANCE_RIP();
11188 IEM_MC_END();
11189 return VINF_SUCCESS;
11190
11191 case IEMMODE_64BIT:
11192 IEMOP_MNEMONIC(cqo, "cqo");
11193 IEM_MC_BEGIN(0, 1);
11194 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
11195 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
11196 } IEM_MC_ELSE() {
11197 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
11198 } IEM_MC_ENDIF();
11199 IEM_MC_ADVANCE_RIP();
11200 IEM_MC_END();
11201 return VINF_SUCCESS;
11202
11203 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11204 }
11205}
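
/*
 * Same idea as cbw above, but here the sign bits land in rDX
 * (illustration only):
 *
 *      cwd:    dx  = (int16_t)ax  < 0 ? 0xffff       : 0;
 *      cdq:    edx = (int32_t)eax < 0 ? 0xffffffff   : 0;
 *      cqo:    rdx = (int64_t)rax < 0 ? ~(uint64_t)0 : 0;
 */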
11206
11207
11208/** Opcode 0x9a. */
11209FNIEMOP_DEF(iemOp_call_Ap)
11210{
11211 IEMOP_MNEMONIC(call_Ap, "call Ap");
11212 IEMOP_HLP_NO_64BIT();
11213
11214 /* Decode the far pointer address and pass it on to the far call C implementation. */
11215 uint32_t offSeg;
11216 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
11217 IEM_OPCODE_GET_NEXT_U32(&offSeg);
11218 else
11219 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
11220 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
11221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11222 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
11223}
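
/*
 * Decode illustration for 0x9a (not emulation code): with a 32-bit
 * operand size the bytes
 *
 *      9a 78 56 34 12 08 00
 *
 * decode as 'call 0008:12345678', a 4-byte offset followed by a 2-byte
 * selector. With a 16-bit operand size the offset shrinks to 2 bytes.
 */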
11224
11225
11226/** Opcode 0x9b. (aka fwait) */
11227FNIEMOP_DEF(iemOp_wait)
11228{
11229 IEMOP_MNEMONIC(wait, "wait");
11230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11231
11232 IEM_MC_BEGIN(0, 0);
11233 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11234 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11235 IEM_MC_ADVANCE_RIP();
11236 IEM_MC_END();
11237 return VINF_SUCCESS;
11238}
11239
11240
11241/** Opcode 0x9c. */
11242FNIEMOP_DEF(iemOp_pushf_Fv)
11243{
11244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11245 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11246 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
11247}
11248
11249
11250/** Opcode 0x9d. */
11251FNIEMOP_DEF(iemOp_popf_Fv)
11252{
11253 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11254 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11255 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
11256}
11257
11258
11259/** Opcode 0x9e. */
11260FNIEMOP_DEF(iemOp_sahf)
11261{
11262 IEMOP_MNEMONIC(sahf, "sahf");
11263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11264 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
11265 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
11266 return IEMOP_RAISE_INVALID_OPCODE();
11267 IEM_MC_BEGIN(0, 2);
11268 IEM_MC_LOCAL(uint32_t, u32Flags);
11269 IEM_MC_LOCAL(uint32_t, EFlags);
11270 IEM_MC_FETCH_EFLAGS(EFlags);
11271 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
11272 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11273 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
11274 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
11275 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
11276 IEM_MC_COMMIT_EFLAGS(EFlags);
11277 IEM_MC_ADVANCE_RIP();
11278 IEM_MC_END();
11279 return VINF_SUCCESS;
11280}
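
/*
 * Flag layout used by the masking above (illustration): AH maps onto the
 * low byte of EFLAGS as
 *
 *      AH bit:     7   6   5   4   3   2   1   0
 *      EFLAGS:     SF  ZF  -   AF  -   PF  1   CF
 *
 * The '-' bits are discarded and bit 1 is forced to 1, which is exactly
 * what the AND/OR sequence above does. LAHF below is the inverse.
 */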
11281
11282
11283/** Opcode 0x9f. */
11284FNIEMOP_DEF(iemOp_lahf)
11285{
11286 IEMOP_MNEMONIC(lahf, "lahf");
11287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11288 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
11289 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
11290 return IEMOP_RAISE_INVALID_OPCODE();
11291 IEM_MC_BEGIN(0, 1);
11292 IEM_MC_LOCAL(uint8_t, u8Flags);
11293 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
11294 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
11295 IEM_MC_ADVANCE_RIP();
11296 IEM_MC_END();
11297 return VINF_SUCCESS;
11298}
11299
11300
11301/**
11302 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
11303 * iemOp_mov_Ov_rAX to fetch the moffsXX part of the opcode and fend off lock
11304 * prefixes. Will return on failures.
11305 * @param a_GCPtrMemOff The variable to store the offset in.
11306 */
11307#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
11308 do \
11309 { \
11310 switch (pVCpu->iem.s.enmEffAddrMode) \
11311 { \
11312 case IEMMODE_16BIT: \
11313 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
11314 break; \
11315 case IEMMODE_32BIT: \
11316 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
11317 break; \
11318 case IEMMODE_64BIT: \
11319 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
11320 break; \
11321 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11322 } \
11323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11324 } while (0)
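
/*
 * Example of what the macro above consumes (illustration only): in
 * 64-bit mode 'mov al, [moffs]' carries a full 8-byte offset,
 *
 *      a0 88 77 66 55 44 33 22 11      ; mov al, [0x1122334455667788]
 *
 * while a 67h address size prefix would shrink the offset to 4 bytes.
 */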
11325
11326/** Opcode 0xa0. */
11327FNIEMOP_DEF(iemOp_mov_Al_Ob)
11328{
11329 /*
11330 * Get the offset and fend off lock prefixes.
11331 */
11332 RTGCPTR GCPtrMemOff;
11333 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11334
11335 /*
11336 * Load AL from the memory offset.
11337 */
11338 IEM_MC_BEGIN(0,1);
11339 IEM_MC_LOCAL(uint8_t, u8Tmp);
11340 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
11341 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
11342 IEM_MC_ADVANCE_RIP();
11343 IEM_MC_END();
11344 return VINF_SUCCESS;
11345}
11346
11347
11348/** Opcode 0xa1. */
11349FNIEMOP_DEF(iemOp_mov_rAX_Ov)
11350{
11351 /*
11352 * Get the offset and fend off lock prefixes.
11353 */
11354 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
11355 RTGCPTR GCPtrMemOff;
11356 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11357
11358 /*
11359 * Load rAX from the memory offset.
11360 */
11361 switch (pVCpu->iem.s.enmEffOpSize)
11362 {
11363 case IEMMODE_16BIT:
11364 IEM_MC_BEGIN(0,1);
11365 IEM_MC_LOCAL(uint16_t, u16Tmp);
11366 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
11367 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
11368 IEM_MC_ADVANCE_RIP();
11369 IEM_MC_END();
11370 return VINF_SUCCESS;
11371
11372 case IEMMODE_32BIT:
11373 IEM_MC_BEGIN(0,1);
11374 IEM_MC_LOCAL(uint32_t, u32Tmp);
11375 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
11376 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
11377 IEM_MC_ADVANCE_RIP();
11378 IEM_MC_END();
11379 return VINF_SUCCESS;
11380
11381 case IEMMODE_64BIT:
11382 IEM_MC_BEGIN(0,1);
11383 IEM_MC_LOCAL(uint64_t, u64Tmp);
11384 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
11385 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
11386 IEM_MC_ADVANCE_RIP();
11387 IEM_MC_END();
11388 return VINF_SUCCESS;
11389
11390 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11391 }
11392}
11393
11394
11395/** Opcode 0xa2. */
11396FNIEMOP_DEF(iemOp_mov_Ob_AL)
11397{
11398 /*
11399 * Get the offset and fend off lock prefixes.
11400 */
11401 RTGCPTR GCPtrMemOff;
11402 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11403
11404 /*
11405 * Store AL.
11406 */
11407 IEM_MC_BEGIN(0,1);
11408 IEM_MC_LOCAL(uint8_t, u8Tmp);
11409 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
11410 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
11411 IEM_MC_ADVANCE_RIP();
11412 IEM_MC_END();
11413 return VINF_SUCCESS;
11414}
11415
11416
11417/** Opcode 0xa3. */
11418FNIEMOP_DEF(iemOp_mov_Ov_rAX)
11419{
11420 /*
11421 * Get the offset and fend off lock prefixes.
11422 */
11423 RTGCPTR GCPtrMemOff;
11424 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11425
11426 /*
11427 * Store rAX.
11428 */
11429 switch (pVCpu->iem.s.enmEffOpSize)
11430 {
11431 case IEMMODE_16BIT:
11432 IEM_MC_BEGIN(0,1);
11433 IEM_MC_LOCAL(uint16_t, u16Tmp);
11434 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
11435 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
11436 IEM_MC_ADVANCE_RIP();
11437 IEM_MC_END();
11438 return VINF_SUCCESS;
11439
11440 case IEMMODE_32BIT:
11441 IEM_MC_BEGIN(0,1);
11442 IEM_MC_LOCAL(uint32_t, u32Tmp);
11443 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
11444 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
11445 IEM_MC_ADVANCE_RIP();
11446 IEM_MC_END();
11447 return VINF_SUCCESS;
11448
11449 case IEMMODE_64BIT:
11450 IEM_MC_BEGIN(0,1);
11451 IEM_MC_LOCAL(uint64_t, u64Tmp);
11452 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
11453 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
11454 IEM_MC_ADVANCE_RIP();
11455 IEM_MC_END();
11456 return VINF_SUCCESS;
11457
11458 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11459 }
11460}
11461
11462/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv */
11463#define IEM_MOVS_CASE(ValBits, AddrBits) \
11464 IEM_MC_BEGIN(0, 2); \
11465 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
11466 IEM_MC_LOCAL(RTGCPTR, uAddr); \
11467 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
11468 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
11469 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
11470 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
11471 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
11472 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11473 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11474 } IEM_MC_ELSE() { \
11475 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11476 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11477 } IEM_MC_ENDIF(); \
11478 IEM_MC_ADVANCE_RIP(); \
11479 IEM_MC_END();
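
/*
 * Rough C equivalent of one MOVS iteration as expanded above
 * (illustration only):
 *
 *      uValue = Read(iEffSeg:rSI);     // DS by default, overridable
 *      Write(ES:rDI, uValue);          // ES is fixed for the target
 *      rSI += EFLAGS.DF ? -cb : +cb;   // cb = operand size in bytes
 *      rDI += EFLAGS.DF ? -cb : +cb;
 */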
11480
11481/** Opcode 0xa4. */
11482FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
11483{
11484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11485
11486 /*
11487 * Use the C implementation if a repeat prefix is encountered.
11488 */
11489 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11490 {
11491 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
11492 switch (pVCpu->iem.s.enmEffAddrMode)
11493 {
11494 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
11495 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
11496 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
11497 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11498 }
11499 }
11500 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
11501
11502 /*
11503 * Sharing case implementation with movs[wdq] below.
11504 */
11505 switch (pVCpu->iem.s.enmEffAddrMode)
11506 {
11507 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
11508 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
11509 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
11510 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11511 }
11512 return VINF_SUCCESS;
11513}
11514
11515
11516/** Opcode 0xa5. */
11517FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
11518{
11519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11520
11521 /*
11522 * Use the C implementation if a repeat prefix is encountered.
11523 */
11524 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11525 {
11526 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
11527 switch (pVCpu->iem.s.enmEffOpSize)
11528 {
11529 case IEMMODE_16BIT:
11530 switch (pVCpu->iem.s.enmEffAddrMode)
11531 {
11532 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
11533 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
11534 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
11535 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11536 }
11537 break;
11538 case IEMMODE_32BIT:
11539 switch (pVCpu->iem.s.enmEffAddrMode)
11540 {
11541 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
11542 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
11543 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
11544 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11545 }
11546 case IEMMODE_64BIT:
11547 switch (pVCpu->iem.s.enmEffAddrMode)
11548 {
11549 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
11550 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
11551 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
11552 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11553 }
11554 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11555 }
11556 }
11557 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
11558
11559 /*
11560 * Annoying double switch here.
11561 * Using ugly macro for implementing the cases, sharing it with movsb.
11562 */
11563 switch (pVCpu->iem.s.enmEffOpSize)
11564 {
11565 case IEMMODE_16BIT:
11566 switch (pVCpu->iem.s.enmEffAddrMode)
11567 {
11568 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
11569 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
11570 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
11571 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11572 }
11573 break;
11574
11575 case IEMMODE_32BIT:
11576 switch (pVCpu->iem.s.enmEffAddrMode)
11577 {
11578 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
11579 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
11580 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
11581 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11582 }
11583 break;
11584
11585 case IEMMODE_64BIT:
11586 switch (pVCpu->iem.s.enmEffAddrMode)
11587 {
11588 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11589 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
11590 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
11591 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11592 }
11593 break;
11594 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11595 }
11596 return VINF_SUCCESS;
11597}
11598
11599#undef IEM_MOVS_CASE
11600
11601/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv */
11602#define IEM_CMPS_CASE(ValBits, AddrBits) \
11603 IEM_MC_BEGIN(3, 3); \
11604 IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
11605 IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
11606 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11607 IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
11608 IEM_MC_LOCAL(RTGCPTR, uAddr); \
11609 \
11610 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
11611 IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
11612 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
11613 IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
11614 IEM_MC_REF_LOCAL(puValue1, uValue1); \
11615 IEM_MC_REF_EFLAGS(pEFlags); \
11616 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
11617 \
11618 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
11619 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11620 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11621 } IEM_MC_ELSE() { \
11622 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11623 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11624 } IEM_MC_ENDIF(); \
11625 IEM_MC_ADVANCE_RIP(); \
11626 IEM_MC_END(); \
11627
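
/*
 * One CMPS iteration boils down to this (illustration only):
 *
 *      uValue1 = Read(iEffSeg:rSI);
 *      uValue2 = Read(ES:rDI);
 *      Cmp(uValue1, uValue2);          // flags only, values discarded
 *      rSI/rDI step by the operand size, direction per EFLAGS.DF
 */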
11628/** Opcode 0xa6. */
11629FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
11630{
11631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11632
11633 /*
11634 * Use the C implementation if a repeat prefix is encountered.
11635 */
11636 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
11637 {
11638 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
11639 switch (pVCpu->iem.s.enmEffAddrMode)
11640 {
11641 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
11642 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
11643 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
11644 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11645 }
11646 }
11647 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
11648 {
11649 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
11650 switch (pVCpu->iem.s.enmEffAddrMode)
11651 {
11652 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
11653 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
11654 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
11655 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11656 }
11657 }
11658 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
11659
11660 /*
11661 * Sharing case implementation with cmps[wdq] below.
11662 */
11663 switch (pVCpu->iem.s.enmEffAddrMode)
11664 {
11665 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
11666 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
11667 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
11668 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11669 }
11670 return VINF_SUCCESS;
11671
11672}
11673
11674
11675/** Opcode 0xa7. */
11676FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
11677{
11678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11679
11680 /*
11681 * Use the C implementation if a repeat prefix is encountered.
11682 */
11683 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
11684 {
11685 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
11686 switch (pVCpu->iem.s.enmEffOpSize)
11687 {
11688 case IEMMODE_16BIT:
11689 switch (pVCpu->iem.s.enmEffAddrMode)
11690 {
11691 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
11692 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
11693 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
11694 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11695 }
11696 break;
11697 case IEMMODE_32BIT:
11698 switch (pVCpu->iem.s.enmEffAddrMode)
11699 {
11700 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
11701 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
11702 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
11703 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11704 }
11705 case IEMMODE_64BIT:
11706 switch (pVCpu->iem.s.enmEffAddrMode)
11707 {
11708 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
11709 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
11710 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
11711 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11712 }
11713 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11714 }
11715 }
11716
11717 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
11718 {
11719 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
11720 switch (pVCpu->iem.s.enmEffOpSize)
11721 {
11722 case IEMMODE_16BIT:
11723 switch (pVCpu->iem.s.enmEffAddrMode)
11724 {
11725 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
11726 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
11727 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
11728 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11729 }
11730 break;
11731 case IEMMODE_32BIT:
11732 switch (pVCpu->iem.s.enmEffAddrMode)
11733 {
11734 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
11735 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
11736 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
11737 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11738 }
11739 case IEMMODE_64BIT:
11740 switch (pVCpu->iem.s.enmEffAddrMode)
11741 {
11742 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
11743 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
11744 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
11745 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11746 }
11747 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11748 }
11749 }
11750
11751 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
11752
11753 /*
11754 * Annoying double switch here.
11755 * Using ugly macro for implementing the cases, sharing it with cmpsb.
11756 */
11757 switch (pVCpu->iem.s.enmEffOpSize)
11758 {
11759 case IEMMODE_16BIT:
11760 switch (pVCpu->iem.s.enmEffAddrMode)
11761 {
11762 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
11763 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
11764 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
11765 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11766 }
11767 break;
11768
11769 case IEMMODE_32BIT:
11770 switch (pVCpu->iem.s.enmEffAddrMode)
11771 {
11772 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
11773 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
11774 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
11775 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11776 }
11777 break;
11778
11779 case IEMMODE_64BIT:
11780 switch (pVCpu->iem.s.enmEffAddrMode)
11781 {
11782 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11783 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
11784 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
11785 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11786 }
11787 break;
11788 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11789 }
11790 return VINF_SUCCESS;
11791
11792}
11793
11794#undef IEM_CMPS_CASE
11795
11796/** Opcode 0xa8. */
11797FNIEMOP_DEF(iemOp_test_AL_Ib)
11798{
11799 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
11800 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
11801 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
11802}
11803
11804
11805/** Opcode 0xa9. */
11806FNIEMOP_DEF(iemOp_test_eAX_Iz)
11807{
11808 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
11809 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
11810 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
11811}
11812
11813
11814/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX */
11815#define IEM_STOS_CASE(ValBits, AddrBits) \
11816 IEM_MC_BEGIN(0, 2); \
11817 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
11818 IEM_MC_LOCAL(RTGCPTR, uAddr); \
11819 IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
11820 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
11821 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
11822 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
11823 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11824 } IEM_MC_ELSE() { \
11825 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11826 } IEM_MC_ENDIF(); \
11827 IEM_MC_ADVANCE_RIP(); \
11828 IEM_MC_END(); \
11829
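
/*
 * One STOS iteration, in short (illustration only):
 *
 *      Write(ES:rDI, rAX);             // width = operand size
 *      rDI += EFLAGS.DF ? -cb : +cb;   // rSI is not involved
 */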
11830/** Opcode 0xaa. */
11831FNIEMOP_DEF(iemOp_stosb_Yb_AL)
11832{
11833 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11834
11835 /*
11836 * Use the C implementation if a repeat prefix is encountered.
11837 */
11838 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11839 {
11840 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
11841 switch (pVCpu->iem.s.enmEffAddrMode)
11842 {
11843 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
11844 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
11845 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
11846 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11847 }
11848 }
11849 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
11850
11851 /*
11852 * Sharing case implementation with stos[wdq] below.
11853 */
11854 switch (pVCpu->iem.s.enmEffAddrMode)
11855 {
11856 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
11857 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
11858 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
11859 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11860 }
11861 return VINF_SUCCESS;
11862}
11863
11864
11865/** Opcode 0xab. */
11866FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
11867{
11868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11869
11870 /*
11871 * Use the C implementation if a repeat prefix is encountered.
11872 */
11873 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11874 {
11875 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
11876 switch (pVCpu->iem.s.enmEffOpSize)
11877 {
11878 case IEMMODE_16BIT:
11879 switch (pVCpu->iem.s.enmEffAddrMode)
11880 {
11881 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
11882 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
11883 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
11884 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11885 }
11886 break;
11887 case IEMMODE_32BIT:
11888 switch (pVCpu->iem.s.enmEffAddrMode)
11889 {
11890 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
11891 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
11892 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
11893 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11894 }
11895 case IEMMODE_64BIT:
11896 switch (pVCpu->iem.s.enmEffAddrMode)
11897 {
11898 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
11899 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
11900 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
11901 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11902 }
11903 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11904 }
11905 }
11906 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
11907
11908 /*
11909 * Annoying double switch here.
11910 * Using ugly macro for implementing the cases, sharing it with stosb.
11911 */
11912 switch (pVCpu->iem.s.enmEffOpSize)
11913 {
11914 case IEMMODE_16BIT:
11915 switch (pVCpu->iem.s.enmEffAddrMode)
11916 {
11917 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
11918 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
11919 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
11920 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11921 }
11922 break;
11923
11924 case IEMMODE_32BIT:
11925 switch (pVCpu->iem.s.enmEffAddrMode)
11926 {
11927 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
11928 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
11929 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
11930 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11931 }
11932 break;
11933
11934 case IEMMODE_64BIT:
11935 switch (pVCpu->iem.s.enmEffAddrMode)
11936 {
11937 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11938 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
11939 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
11940 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11941 }
11942 break;
11943 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11944 }
11945 return VINF_SUCCESS;
11946}
11947
11948#undef IEM_STOS_CASE
11949
11950/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv */
11951#define IEM_LODS_CASE(ValBits, AddrBits) \
11952 IEM_MC_BEGIN(0, 2); \
11953 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
11954 IEM_MC_LOCAL(RTGCPTR, uAddr); \
11955 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
11956 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
11957 IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
11958 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
11959 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11960 } IEM_MC_ELSE() { \
11961 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11962 } IEM_MC_ENDIF(); \
11963 IEM_MC_ADVANCE_RIP(); \
11964 IEM_MC_END();
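
/*
 * One LODS iteration, in short (illustration only):
 *
 *      rAX = Read(iEffSeg:rSI);        // width = operand size
 *      rSI += EFLAGS.DF ? -cb : +cb;   // rDI is not involved
 */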
11965
11966/** Opcode 0xac. */
11967FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
11968{
11969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11970
11971 /*
11972 * Use the C implementation if a repeat prefix is encountered.
11973 */
11974 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11975 {
11976 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
11977 switch (pVCpu->iem.s.enmEffAddrMode)
11978 {
11979 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
11980 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
11981 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
11982 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11983 }
11984 }
11985 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
11986
11987 /*
11988 * Sharing case implementation with lods[wdq] below.
11989 */
11990 switch (pVCpu->iem.s.enmEffAddrMode)
11991 {
11992 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
11993 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
11994 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
11995 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11996 }
11997 return VINF_SUCCESS;
11998}
11999
12000
12001/** Opcode 0xad. */
12002FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
12003{
12004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12005
12006 /*
12007 * Use the C implementation if a repeat prefix is encountered.
12008 */
12009 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12010 {
12011 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
12012 switch (pVCpu->iem.s.enmEffOpSize)
12013 {
12014 case IEMMODE_16BIT:
12015 switch (pVCpu->iem.s.enmEffAddrMode)
12016 {
12017 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
12018 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
12019 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
12020 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12021 }
12022 break;
12023 case IEMMODE_32BIT:
12024 switch (pVCpu->iem.s.enmEffAddrMode)
12025 {
12026 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
12027 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
12028 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
12029 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12030 }
12031 case IEMMODE_64BIT:
12032 switch (pVCpu->iem.s.enmEffAddrMode)
12033 {
12034 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
12035 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
12036 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
12037 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12038 }
12039 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12040 }
12041 }
12042 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
12043
12044 /*
12045 * Annoying double switch here.
12046 * Using ugly macro for implementing the cases, sharing it with lodsb.
12047 */
12048 switch (pVCpu->iem.s.enmEffOpSize)
12049 {
12050 case IEMMODE_16BIT:
12051 switch (pVCpu->iem.s.enmEffAddrMode)
12052 {
12053 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
12054 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
12055 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
12056 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12057 }
12058 break;
12059
12060 case IEMMODE_32BIT:
12061 switch (pVCpu->iem.s.enmEffAddrMode)
12062 {
12063 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
12064 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
12065 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
12066 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12067 }
12068 break;
12069
12070 case IEMMODE_64BIT:
12071 switch (pVCpu->iem.s.enmEffAddrMode)
12072 {
12073 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12074 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
12075 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
12076 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12077 }
12078 break;
12079 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12080 }
12081 return VINF_SUCCESS;
12082}
12083
12084#undef IEM_LODS_CASE
12085
12086/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv */
12087#define IEM_SCAS_CASE(ValBits, AddrBits) \
12088 IEM_MC_BEGIN(3, 2); \
12089 IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
12090 IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
12091 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
12092 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12093 \
12094 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12095 IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
12096 IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
12097 IEM_MC_REF_EFLAGS(pEFlags); \
12098 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
12099 \
12100 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12101 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12102 } IEM_MC_ELSE() { \
12103 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12104 } IEM_MC_ENDIF(); \
12105 IEM_MC_ADVANCE_RIP(); \
12106 IEM_MC_END();
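
/*
 * One SCAS iteration, in short (illustration only):
 *
 *      Cmp(rAX, Read(ES:rDI));         // flags only, rAX is unchanged
 *      rDI += EFLAGS.DF ? -cb : +cb;
 *
 * The REPE/REPNE forms defer to C implementations that loop on rCX and
 * the ZF outcome of this compare.
 */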
12107
12108/** Opcode 0xae. */
12109FNIEMOP_DEF(iemOp_scasb_AL_Xb)
12110{
12111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12112
12113 /*
12114 * Use the C implementation if a repeat prefix is encountered.
12115 */
12116 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12117 {
12118 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
12119 switch (pVCpu->iem.s.enmEffAddrMode)
12120 {
12121 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
12122 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
12123 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
12124 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12125 }
12126 }
12127 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12128 {
12129 IEMOP_MNEMONIC(repne_scasb_AL_Xb, "repne scasb AL,Xb");
12130 switch (pVCpu->iem.s.enmEffAddrMode)
12131 {
12132 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
12133 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
12134 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
12135 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12136 }
12137 }
12138 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
12139
12140 /*
12141 * Sharing case implementation with scas[wdq] below.
12142 */
12143 switch (pVCpu->iem.s.enmEffAddrMode)
12144 {
12145 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
12146 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
12147 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
12148 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12149 }
12150 return VINF_SUCCESS;
12151}
12152
12153
12154/** Opcode 0xaf. */
12155FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
12156{
12157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12158
12159 /*
12160 * Use the C implementation if a repeat prefix is encountered.
12161 */
12162 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12163 {
12164 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
12165 switch (pVCpu->iem.s.enmEffOpSize)
12166 {
12167 case IEMMODE_16BIT:
12168 switch (pVCpu->iem.s.enmEffAddrMode)
12169 {
12170 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
12171 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
12172 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
12173 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12174 }
12175 break;
12176 case IEMMODE_32BIT:
12177 switch (pVCpu->iem.s.enmEffAddrMode)
12178 {
12179 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
12180 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
12181 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
12182 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12183                 }
                      break;
12184 case IEMMODE_64BIT:
12185 switch (pVCpu->iem.s.enmEffAddrMode)
12186 {
12187                     case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Is this wrong? We can do 32-bit addressing in 64-bit mode, but not 16-bit, right? */
12188 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
12189 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
12190 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12191 }
12192 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12193 }
12194 }
12195 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12196 {
12197 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
12198 switch (pVCpu->iem.s.enmEffOpSize)
12199 {
12200 case IEMMODE_16BIT:
12201 switch (pVCpu->iem.s.enmEffAddrMode)
12202 {
12203 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
12204 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
12205 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
12206 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12207 }
12208 break;
12209 case IEMMODE_32BIT:
12210 switch (pVCpu->iem.s.enmEffAddrMode)
12211 {
12212 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
12213 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
12214 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
12215 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12216                 }
                      break;
12217 case IEMMODE_64BIT:
12218 switch (pVCpu->iem.s.enmEffAddrMode)
12219 {
12220 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
12221 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
12222 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
12223 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12224 }
12225 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12226 }
12227 }
12228 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
12229
12230 /*
12231 * Annoying double switch here.
12232 * Using ugly macro for implementing the cases, sharing it with scasb.
12233 */
12234 switch (pVCpu->iem.s.enmEffOpSize)
12235 {
12236 case IEMMODE_16BIT:
12237 switch (pVCpu->iem.s.enmEffAddrMode)
12238 {
12239 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
12240 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
12241 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
12242 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12243 }
12244 break;
12245
12246 case IEMMODE_32BIT:
12247 switch (pVCpu->iem.s.enmEffAddrMode)
12248 {
12249 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
12250 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
12251 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
12252 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12253 }
12254 break;
12255
12256 case IEMMODE_64BIT:
12257 switch (pVCpu->iem.s.enmEffAddrMode)
12258 {
12259 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12260 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
12261 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
12262 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12263 }
12264 break;
12265 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12266 }
12267 return VINF_SUCCESS;
12268}
12269
12270#undef IEM_SCAS_CASE
12271
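#if 0 /* Illustrative sketch only, not built: the loop that the deferred
       * iemCImpl_repe_scas_al_m64 style workers implement, simplified to a
       * flat ES segment and ignoring fault and interrupt windows. */
static void repeScasbSketch(uint64_t *puRcx, uint64_t *puRdi, uint8_t bAl,
                            uint8_t const *pbMem, uint32_t *pfEFlags)
{
    while (*puRcx != 0)
    {
        iemAImpl_cmp_u8(&bAl, pbMem[*puRdi], pfEFlags);
        if (*pfEFlags & X86_EFL_DF)
            *puRdi -= 1;
        else
            *puRdi += 1;
        *puRcx -= 1;
        if (!(*pfEFlags & X86_EFL_ZF))      /* repe: stop once ZF goes clear (repne: inverted test) */
            break;
    }
}
#endif
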
12272/**
12273 * Common 'mov r8, imm8' helper.
12274 */
12275FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
12276{
12277 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12278 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12279
12280 IEM_MC_BEGIN(0, 1);
12281 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
12282 IEM_MC_STORE_GREG_U8(iReg, u8Value);
12283 IEM_MC_ADVANCE_RIP();
12284 IEM_MC_END();
12285
12286 return VINF_SUCCESS;
12287}
12288
12289
12290/** Opcode 0xb0. */
12291FNIEMOP_DEF(iemOp_mov_AL_Ib)
12292{
12293 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
12294 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12295}
12296
12297
12298/** Opcode 0xb1. */
12299FNIEMOP_DEF(iemOp_CL_Ib)
12300{
12301 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
12302 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12303}
12304
12305
12306/** Opcode 0xb2. */
12307FNIEMOP_DEF(iemOp_DL_Ib)
12308{
12309 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
12310 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12311}
12312
12313
12314/** Opcode 0xb3. */
12315FNIEMOP_DEF(iemOp_BL_Ib)
12316{
12317 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
12318 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12319}
12320
12321
12322/** Opcode 0xb4. */
12323FNIEMOP_DEF(iemOp_mov_AH_Ib)
12324{
12325 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
12326 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12327}
12328
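#if 0 /* Illustrative only: opcode 0xb4 passes X86_GREG_xSP above because
       * byte-register indices 4..7 mean AH/CH/DH/BH only when no REX prefix
       * is present; any REX remaps them to SPL/BPL/SIL/DIL, which
       * IEM_MC_STORE_GREG_U8 is expected to handle.  Hypothetical helper
       * sketching the mapping: */
static const char *byteRegNameSketch(uint8_t iReg, bool fHasRex)
{
    static const char * const s_apszLegacy[8] = { "al", "cl", "dl", "bl", "ah",  "ch",  "dh",  "bh"  };
    static const char * const s_apszRex[8]    = { "al", "cl", "dl", "bl", "spl", "bpl", "sil", "dil" };
    return fHasRex ? s_apszRex[iReg & 7] : s_apszLegacy[iReg & 7];
}
#endif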
12329
12330/** Opcode 0xb5. */
12331FNIEMOP_DEF(iemOp_CH_Ib)
12332{
12333 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
12334 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12335}
12336
12337
12338/** Opcode 0xb6. */
12339FNIEMOP_DEF(iemOp_DH_Ib)
12340{
12341 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
12342 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12343}
12344
12345
12346/** Opcode 0xb7. */
12347FNIEMOP_DEF(iemOp_BH_Ib)
12348{
12349 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
12350 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12351}
12352
12353
12354/**
12355 * Common 'mov regX,immX' helper.
12356 */
12357FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
12358{
12359 switch (pVCpu->iem.s.enmEffOpSize)
12360 {
12361 case IEMMODE_16BIT:
12362 {
12363 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12365
12366 IEM_MC_BEGIN(0, 1);
12367 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
12368 IEM_MC_STORE_GREG_U16(iReg, u16Value);
12369 IEM_MC_ADVANCE_RIP();
12370 IEM_MC_END();
12371 break;
12372 }
12373
12374 case IEMMODE_32BIT:
12375 {
12376 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12378
12379 IEM_MC_BEGIN(0, 1);
12380 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
12381 IEM_MC_STORE_GREG_U32(iReg, u32Value);
12382 IEM_MC_ADVANCE_RIP();
12383 IEM_MC_END();
12384 break;
12385 }
12386 case IEMMODE_64BIT:
12387 {
12388 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
12389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12390
12391 IEM_MC_BEGIN(0, 1);
12392 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
12393 IEM_MC_STORE_GREG_U64(iReg, u64Value);
12394 IEM_MC_ADVANCE_RIP();
12395 IEM_MC_END();
12396 break;
12397 }
12398 }
12399
12400 return VINF_SUCCESS;
12401}
12402
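#if 0 /* Illustrative sketch only, not built: REX.W + B8+rd is the sole x86
       * instruction form with a full 64-bit immediate, which is why the
       * 64-bit case above fetches eight immediate bytes.  Hypothetical
       * encoder for 'mov rax, imm64': */
static size_t encodeMovRaxImm64Sketch(uint8_t *pb, uint64_t uImm)
{
    pb[0] = 0x48;                               /* REX.W */
    pb[1] = 0xb8;                               /* B8+rd with rd=0 => rAX */
    for (unsigned i = 0; i < 8; i++)
        pb[2 + i] = (uint8_t)(uImm >> (i * 8)); /* little-endian imm64 */
    return 10;                                  /* total instruction length */
}
#endif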
12403
12404/** Opcode 0xb8. */
12405FNIEMOP_DEF(iemOp_eAX_Iv)
12406{
12407     IEMOP_MNEMONIC(mov_rAX_Iv, "mov rAX,Iv");
12408 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12409}
12410
12411
12412/** Opcode 0xb9. */
12413FNIEMOP_DEF(iemOp_eCX_Iv)
12414{
12415     IEMOP_MNEMONIC(mov_rCX_Iv, "mov rCX,Iv");
12416 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12417}
12418
12419
12420/** Opcode 0xba. */
12421FNIEMOP_DEF(iemOp_eDX_Iv)
12422{
12423     IEMOP_MNEMONIC(mov_rDX_Iv, "mov rDX,Iv");
12424 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12425}
12426
12427
12428/** Opcode 0xbb. */
12429FNIEMOP_DEF(iemOp_eBX_Iv)
12430{
12431     IEMOP_MNEMONIC(mov_rBX_Iv, "mov rBX,Iv");
12432 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12433}
12434
12435
12436/** Opcode 0xbc. */
12437FNIEMOP_DEF(iemOp_eSP_Iv)
12438{
12439     IEMOP_MNEMONIC(mov_rSP_Iv, "mov rSP,Iv");
12440 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12441}
12442
12443
12444/** Opcode 0xbd. */
12445FNIEMOP_DEF(iemOp_eBP_Iv)
12446{
12447     IEMOP_MNEMONIC(mov_rBP_Iv, "mov rBP,Iv");
12448 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12449}
12450
12451
12452/** Opcode 0xbe. */
12453FNIEMOP_DEF(iemOp_eSI_Iv)
12454{
12455     IEMOP_MNEMONIC(mov_rSI_Iv, "mov rSI,Iv");
12456 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12457}
12458
12459
12460/** Opcode 0xbf. */
12461FNIEMOP_DEF(iemOp_eDI_Iv)
12462{
12463     IEMOP_MNEMONIC(mov_rDI_Iv, "mov rDI,Iv");
12464 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12465}
12466
12467
12468/** Opcode 0xc0. */
12469FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
12470{
12471 IEMOP_HLP_MIN_186();
12472 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12473 PCIEMOPSHIFTSIZES pImpl;
12474 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
12475 {
12476 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
12477 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
12478 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
12479 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
12480 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
12481 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
12482 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
12483 case 6: return IEMOP_RAISE_INVALID_OPCODE();
12484         IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc may be stupid */
12485 }
12486 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
12487
12488 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12489 {
12490 /* register */
12491 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12493 IEM_MC_BEGIN(3, 0);
12494 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
12495 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
12496 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12497 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
12498 IEM_MC_REF_EFLAGS(pEFlags);
12499 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
12500 IEM_MC_ADVANCE_RIP();
12501 IEM_MC_END();
12502 }
12503 else
12504 {
12505 /* memory */
12506 IEM_MC_BEGIN(3, 2);
12507 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
12508 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12509 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12510 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12511
12512 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12513 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12514 IEM_MC_ASSIGN(cShiftArg, cShift);
12515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12516 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12517 IEM_MC_FETCH_EFLAGS(EFlags);
12518 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
12519
12520 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
12521 IEM_MC_COMMIT_EFLAGS(EFlags);
12522 IEM_MC_ADVANCE_RIP();
12523 IEM_MC_END();
12524 }
12525 return VINF_SUCCESS;
12526}
12527
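#if 0 /* Illustrative only: per the SDM the CPU masks the Grp2 shift/rotate
       * count to 5 bits before use (6 bits for 64-bit operands; rcl/rcr on
       * 8/16-bit operands reduce it further modulo 9/17), so the assembly
       * helpers invoked above always see a well-defined count.  Sketch for
       * the 32-bit shl case: */
static uint32_t shlU32Sketch(uint32_t uDst, uint8_t cShift)
{
    return uDst << (cShift & 31);   /* count taken modulo 32 */
}
#endif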
12528
12529/** Opcode 0xc1. */
12530FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
12531{
12532 IEMOP_HLP_MIN_186();
12533 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12534 PCIEMOPSHIFTSIZES pImpl;
12535 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
12536 {
12537 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
12538 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
12539 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
12540 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
12541 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
12542 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
12543 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
12544 case 6: return IEMOP_RAISE_INVALID_OPCODE();
12545         IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc may be stupid */
12546 }
12547 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
12548
12549 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12550 {
12551 /* register */
12552 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12553 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12554 switch (pVCpu->iem.s.enmEffOpSize)
12555 {
12556 case IEMMODE_16BIT:
12557 IEM_MC_BEGIN(3, 0);
12558 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12559 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
12560 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12561 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
12562 IEM_MC_REF_EFLAGS(pEFlags);
12563 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
12564 IEM_MC_ADVANCE_RIP();
12565 IEM_MC_END();
12566 return VINF_SUCCESS;
12567
12568 case IEMMODE_32BIT:
12569 IEM_MC_BEGIN(3, 0);
12570 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12571 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
12572 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12573 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
12574 IEM_MC_REF_EFLAGS(pEFlags);
12575 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
12576 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12577 IEM_MC_ADVANCE_RIP();
12578 IEM_MC_END();
12579 return VINF_SUCCESS;
12580
12581 case IEMMODE_64BIT:
12582 IEM_MC_BEGIN(3, 0);
12583 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12584 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
12585 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12586 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
12587 IEM_MC_REF_EFLAGS(pEFlags);
12588 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
12589 IEM_MC_ADVANCE_RIP();
12590 IEM_MC_END();
12591 return VINF_SUCCESS;
12592
12593 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12594 }
12595 }
12596 else
12597 {
12598 /* memory */
12599 switch (pVCpu->iem.s.enmEffOpSize)
12600 {
12601 case IEMMODE_16BIT:
12602 IEM_MC_BEGIN(3, 2);
12603 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12604 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12605 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12606 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12607
12608 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12609 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12610 IEM_MC_ASSIGN(cShiftArg, cShift);
12611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12612 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12613 IEM_MC_FETCH_EFLAGS(EFlags);
12614 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
12615
12616 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
12617 IEM_MC_COMMIT_EFLAGS(EFlags);
12618 IEM_MC_ADVANCE_RIP();
12619 IEM_MC_END();
12620 return VINF_SUCCESS;
12621
12622 case IEMMODE_32BIT:
12623 IEM_MC_BEGIN(3, 2);
12624 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12625 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12626 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12627 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12628
12629 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12630 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12631 IEM_MC_ASSIGN(cShiftArg, cShift);
12632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12633 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12634 IEM_MC_FETCH_EFLAGS(EFlags);
12635 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
12636
12637 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
12638 IEM_MC_COMMIT_EFLAGS(EFlags);
12639 IEM_MC_ADVANCE_RIP();
12640 IEM_MC_END();
12641 return VINF_SUCCESS;
12642
12643 case IEMMODE_64BIT:
12644 IEM_MC_BEGIN(3, 2);
12645 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12646 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12647 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12648 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12649
12650 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12651 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12652 IEM_MC_ASSIGN(cShiftArg, cShift);
12653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12654 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12655 IEM_MC_FETCH_EFLAGS(EFlags);
12656 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
12657
12658 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
12659 IEM_MC_COMMIT_EFLAGS(EFlags);
12660 IEM_MC_ADVANCE_RIP();
12661 IEM_MC_END();
12662 return VINF_SUCCESS;
12663
12664 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12665 }
12666 }
12667}
12668
12669
12670/** Opcode 0xc2. */
12671FNIEMOP_DEF(iemOp_retn_Iw)
12672{
12673 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
12674 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12676 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12677 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
12678}
12679
12680
12681/** Opcode 0xc3. */
12682FNIEMOP_DEF(iemOp_retn)
12683{
12684 IEMOP_MNEMONIC(retn, "retn");
12685 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12687 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
12688}
12689
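#if 0 /* Illustrative sketch only, not built: the essence of what the
       * deferred iemCImpl_retn does for a 64-bit near return, flat stack
       * assumed and all checks omitted.  cbImm is 0 for plain retn. */
static void retnSketch(uint64_t *puRip, uint64_t *puRsp, uint8_t const *pbStack, uint16_t cbImm)
{
    uint64_t uNewRip;
    memcpy(&uNewRip, pbStack + *puRsp, sizeof(uNewRip));    /* pop the return address */
    *puRsp += sizeof(uNewRip) + cbImm;                      /* ...plus imm16 bytes of callee-cleaned args */
    *puRip  = uNewRip;
}
#endif
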
12690
12691/** Opcode 0xc4. */
12692FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
12693{
12694 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12695 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
12696 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12697 {
12698         IEMOP_MNEMONIC(vex3_prefix, "3-byte-vex");
12699         /* The LES instruction is invalid in 64-bit mode. In legacy and
12700            compatibility mode it is invalid with MOD=3.
12701            The use as the 3-byte VEX prefix is made possible by assigning the
12702            inverted REX.R and REX.X to the two MOD bits, since the REX bits are
12703            ignored outside of 64-bit mode, which forces MOD=3 in 32-bit code
12704            for any VEX-encoded instruction. */
12705 /** @todo VEX: Just use new tables for it. */
12706 return IEMOP_RAISE_INVALID_OPCODE();
12707 }
12708 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
12709 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
12710}
12711
12712
12713/** Opcode 0xc5. */
12714FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
12715{
12716     /* The LDS instruction is invalid in 64-bit mode. In legacy and
12717        compatibility mode it is invalid with MOD=3.
12718        The use as the 2-byte VEX prefix is made possible by assigning the
12719        inverted REX.R to the top MOD bit and the top bit of the inverted
12720        register specifier to the bottom MOD bit, thereby effectively limiting
             32-bit code to registers 0..7 in this VEX form. VEX is not available
             in real or v86 mode. */
12721 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12722 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
12723 {
12724 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
12725 {
12726 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
12727 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
12728 }
12729 IEMOP_HLP_NO_REAL_OR_V86_MODE();
12730 }
12731
12732     IEMOP_MNEMONIC(vex2_prefix, "2-byte-vex");
12733     /** @todo Test when exactly the VEX conformance checks kick in during
12734      * instruction decoding and fetching (using \#PF). */
12735     uint8_t bVex;    IEM_OPCODE_GET_NEXT_U8(&bVex);    /* the single VEX payload byte */
12736     uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
12738#if 0 /* will make sense of this next week... */
12739     if (   !(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
12740 &&
12741 )
12742 {
12743
12744 }
12745#endif
12746
12747 /** @todo VEX: Just use new tables for it. */
12748 return IEMOP_RAISE_INVALID_OPCODE();
12749}
12750
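#if 0 /* Illustrative only: field layout of the single VEX payload byte of
       * the 2-byte form handled above (the 3-byte form splits the same
       * fields across two bytes).  Hypothetical decoder, not VBox code: */
static void decodeVex2Sketch(uint8_t bVex, bool *pfRexR, uint8_t *pbVvvv, bool *pfL, uint8_t *pbPp)
{
    *pfRexR = !(bVex & 0x80);           /* bit 7: inverted REX.R */
    *pbVvvv = (uint8_t)(~bVex >> 3) & 0xf; /* bits 6:3: inverted second source register */
    *pfL    = (bVex >> 2) & 1;          /* bit 2: vector length, 0=128, 1=256 */
    *pbPp   = bVex & 3;                 /* bits 1:0: implied 66/F3/F2 prefix */
}
#endif
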
12751
12752/** Opcode 0xc6. */
12753FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
12754{
12755 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12756 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
12757 return IEMOP_RAISE_INVALID_OPCODE();
12758 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
12759
12760 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12761 {
12762 /* register access */
12763 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12765 IEM_MC_BEGIN(0, 0);
12766 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
12767 IEM_MC_ADVANCE_RIP();
12768 IEM_MC_END();
12769 }
12770 else
12771 {
12772 /* memory access. */
12773 IEM_MC_BEGIN(0, 1);
12774 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12775 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12776 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12778 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
12779 IEM_MC_ADVANCE_RIP();
12780 IEM_MC_END();
12781 }
12782 return VINF_SUCCESS;
12783}
12784
12785
12786/** Opcode 0xc7. */
12787FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
12788{
12789 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12790     if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
12791 return IEMOP_RAISE_INVALID_OPCODE();
12792 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
12793
12794 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12795 {
12796 /* register access */
12797 switch (pVCpu->iem.s.enmEffOpSize)
12798 {
12799 case IEMMODE_16BIT:
12800 IEM_MC_BEGIN(0, 0);
12801 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12803 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
12804 IEM_MC_ADVANCE_RIP();
12805 IEM_MC_END();
12806 return VINF_SUCCESS;
12807
12808 case IEMMODE_32BIT:
12809 IEM_MC_BEGIN(0, 0);
12810 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12812 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
12813 IEM_MC_ADVANCE_RIP();
12814 IEM_MC_END();
12815 return VINF_SUCCESS;
12816
12817 case IEMMODE_64BIT:
12818 IEM_MC_BEGIN(0, 0);
12819 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
12820 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12821 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
12822 IEM_MC_ADVANCE_RIP();
12823 IEM_MC_END();
12824 return VINF_SUCCESS;
12825
12826 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12827 }
12828 }
12829 else
12830 {
12831 /* memory access. */
12832 switch (pVCpu->iem.s.enmEffOpSize)
12833 {
12834 case IEMMODE_16BIT:
12835 IEM_MC_BEGIN(0, 1);
12836 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12837 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
12838 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12840 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
12841 IEM_MC_ADVANCE_RIP();
12842 IEM_MC_END();
12843 return VINF_SUCCESS;
12844
12845 case IEMMODE_32BIT:
12846 IEM_MC_BEGIN(0, 1);
12847 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12848 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
12849 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12851 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
12852 IEM_MC_ADVANCE_RIP();
12853 IEM_MC_END();
12854 return VINF_SUCCESS;
12855
12856 case IEMMODE_64BIT:
12857 IEM_MC_BEGIN(0, 1);
12858 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12859 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
12860 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
12861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12862 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
12863 IEM_MC_ADVANCE_RIP();
12864 IEM_MC_END();
12865 return VINF_SUCCESS;
12866
12867 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12868 }
12869 }
12870}
12871
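#if 0 /* Illustrative only: unlike REX.W B8+rd further up, C7 /0 with REX.W
       * takes an Iz immediate - 32 bits sign-extended to 64 - which is what
       * IEM_OPCODE_GET_NEXT_S32_SX_U64 above implements.  Sketch: */
static uint64_t signExtendImm32Sketch(uint32_t uImm32)
{
    return (uint64_t)(int64_t)(int32_t)uImm32;  /* 0x80000000 -> 0xffffffff80000000 */
}
#endif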
12872
12873
12874
12875/** Opcode 0xc8. */
12876FNIEMOP_DEF(iemOp_enter_Iw_Ib)
12877{
12878 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
12879 IEMOP_HLP_MIN_186();
12880 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12881 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
12882 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
12883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12884 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
12885}
12886
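#if 0 /* Illustrative sketch only, not built: the common nesting-level-zero
       * case of the deferred iemCImpl_enter, on a flat 64-bit stack with
       * all checks omitted: */
static void enterLevel0Sketch(uint64_t *puRsp, uint64_t *puRbp, uint8_t *pbStack, uint16_t cbFrame)
{
    *puRsp -= sizeof(uint64_t);
    memcpy(pbStack + *puRsp, puRbp, sizeof(uint64_t));  /* push rBP */
    *puRbp  = *puRsp;                                   /* rBP = new frame base */
    *puRsp -= cbFrame;                                  /* reserve the local variable area */
}
#endif
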
12887
12888/** Opcode 0xc9. */
12889FNIEMOP_DEF(iemOp_leave)
12890{
12891 IEMOP_MNEMONIC(leave, "leave");
12892 IEMOP_HLP_MIN_186();
12893 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12895 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
12896}
12897
12898
12899/** Opcode 0xca. */
12900FNIEMOP_DEF(iemOp_retf_Iw)
12901{
12902 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
12903 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12904 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12905 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12906 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
12907}
12908
12909
12910/** Opcode 0xcb. */
12911FNIEMOP_DEF(iemOp_retf)
12912{
12913 IEMOP_MNEMONIC(retf, "retf");
12914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12915 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12916 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
12917}
12918
12919
12920/** Opcode 0xcc. */
12921FNIEMOP_DEF(iemOp_int_3)
12922{
12923     IEMOP_MNEMONIC(int3, "int3");
          IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12924 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
12925}
12926
12927
12928/** Opcode 0xcd. */
12929FNIEMOP_DEF(iemOp_int_Ib)
12930{
12931     IEMOP_MNEMONIC(int_Ib, "int Ib");
          uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
12932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12933 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
12934}
12935
12936
12937/** Opcode 0xce. */
12938FNIEMOP_DEF(iemOp_into)
12939{
12940 IEMOP_MNEMONIC(into, "into");
12941 IEMOP_HLP_NO_64BIT();
12942
12943 IEM_MC_BEGIN(2, 0);
12944 IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
12945 IEM_MC_ARG_CONST(bool, fIsBpInstr, /*=*/ false, 1);
12946 IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
12947 IEM_MC_END();
12948 return VINF_SUCCESS;
12949}
12950
12951
12952/** Opcode 0xcf. */
12953FNIEMOP_DEF(iemOp_iret)
12954{
12955 IEMOP_MNEMONIC(iret, "iret");
12956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12957 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
12958}
12959
12960
12961/** Opcode 0xd0. */
12962FNIEMOP_DEF(iemOp_Grp2_Eb_1)
12963{
12964 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12965 PCIEMOPSHIFTSIZES pImpl;
12966 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
12967 {
12968 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
12969 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
12970 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
12971 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
12972 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
12973 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
12974 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
12975 case 6: return IEMOP_RAISE_INVALID_OPCODE();
12976 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
12977 }
12978 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
12979
12980 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12981 {
12982 /* register */
12983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12984 IEM_MC_BEGIN(3, 0);
12985 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
12986 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
12987 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12988 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
12989 IEM_MC_REF_EFLAGS(pEFlags);
12990 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
12991 IEM_MC_ADVANCE_RIP();
12992 IEM_MC_END();
12993 }
12994 else
12995 {
12996 /* memory */
12997 IEM_MC_BEGIN(3, 2);
12998 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
12999 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
13000 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13001 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13002
13003 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13005 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13006 IEM_MC_FETCH_EFLAGS(EFlags);
13007 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13008
13009 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
13010 IEM_MC_COMMIT_EFLAGS(EFlags);
13011 IEM_MC_ADVANCE_RIP();
13012 IEM_MC_END();
13013 }
13014 return VINF_SUCCESS;
13015}
13016
13017
13018
13019/** Opcode 0xd1. */
13020FNIEMOP_DEF(iemOp_Grp2_Ev_1)
13021{
13022 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13023 PCIEMOPSHIFTSIZES pImpl;
13024 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13025 {
13026 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
13027 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
13028 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
13029 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
13030 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
13031 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
13032 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
13033 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13034 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
13035 }
13036 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13037
13038 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13039 {
13040 /* register */
13041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13042 switch (pVCpu->iem.s.enmEffOpSize)
13043 {
13044 case IEMMODE_16BIT:
13045 IEM_MC_BEGIN(3, 0);
13046 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13047 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13048 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13049 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13050 IEM_MC_REF_EFLAGS(pEFlags);
13051 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13052 IEM_MC_ADVANCE_RIP();
13053 IEM_MC_END();
13054 return VINF_SUCCESS;
13055
13056 case IEMMODE_32BIT:
13057 IEM_MC_BEGIN(3, 0);
13058 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13059 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13060 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13061 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13062 IEM_MC_REF_EFLAGS(pEFlags);
13063 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13064 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
13065 IEM_MC_ADVANCE_RIP();
13066 IEM_MC_END();
13067 return VINF_SUCCESS;
13068
13069 case IEMMODE_64BIT:
13070 IEM_MC_BEGIN(3, 0);
13071 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13072 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13073 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13074 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13075 IEM_MC_REF_EFLAGS(pEFlags);
13076 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13077 IEM_MC_ADVANCE_RIP();
13078 IEM_MC_END();
13079 return VINF_SUCCESS;
13080
13081 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13082 }
13083 }
13084 else
13085 {
13086 /* memory */
13087 switch (pVCpu->iem.s.enmEffOpSize)
13088 {
13089 case IEMMODE_16BIT:
13090 IEM_MC_BEGIN(3, 2);
13091 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13092 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13093 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13094 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13095
13096 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13097 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13098 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13099 IEM_MC_FETCH_EFLAGS(EFlags);
13100 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13101
13102 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
13103 IEM_MC_COMMIT_EFLAGS(EFlags);
13104 IEM_MC_ADVANCE_RIP();
13105 IEM_MC_END();
13106 return VINF_SUCCESS;
13107
13108 case IEMMODE_32BIT:
13109 IEM_MC_BEGIN(3, 2);
13110 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13111 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13112 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13113 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13114
13115 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13116 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13117 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13118 IEM_MC_FETCH_EFLAGS(EFlags);
13119 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13120
13121 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
13122 IEM_MC_COMMIT_EFLAGS(EFlags);
13123 IEM_MC_ADVANCE_RIP();
13124 IEM_MC_END();
13125 return VINF_SUCCESS;
13126
13127 case IEMMODE_64BIT:
13128 IEM_MC_BEGIN(3, 2);
13129 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13130 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13131 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13132 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13133
13134 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13136 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13137 IEM_MC_FETCH_EFLAGS(EFlags);
13138 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13139
13140 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
13141 IEM_MC_COMMIT_EFLAGS(EFlags);
13142 IEM_MC_ADVANCE_RIP();
13143 IEM_MC_END();
13144 return VINF_SUCCESS;
13145
13146 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13147 }
13148 }
13149}
13150
13151
13152/** Opcode 0xd2. */
13153FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
13154{
13155 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13156 PCIEMOPSHIFTSIZES pImpl;
13157 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13158 {
13159 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
13160 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
13161 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
13162 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
13163 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
13164 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
13165 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
13166 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13167 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
13168 }
13169 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13170
13171 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13172 {
13173 /* register */
13174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13175 IEM_MC_BEGIN(3, 0);
13176 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13177 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13178 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13179 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13180 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13181 IEM_MC_REF_EFLAGS(pEFlags);
13182 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13183 IEM_MC_ADVANCE_RIP();
13184 IEM_MC_END();
13185 }
13186 else
13187 {
13188 /* memory */
13189 IEM_MC_BEGIN(3, 2);
13190 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13191 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13192 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13193 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13194
13195 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13196 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13197 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13198 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13199 IEM_MC_FETCH_EFLAGS(EFlags);
13200 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13201
13202 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
13203 IEM_MC_COMMIT_EFLAGS(EFlags);
13204 IEM_MC_ADVANCE_RIP();
13205 IEM_MC_END();
13206 }
13207 return VINF_SUCCESS;
13208}
13209
13210
13211/** Opcode 0xd3. */
13212FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
13213{
13214 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13215 PCIEMOPSHIFTSIZES pImpl;
13216 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13217 {
13218 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
13219 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
13220 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
13221 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
13222 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
13223 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
13224 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
13225 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13226         IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc may be stupid */
13227 }
13228 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13229
13230 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13231 {
13232 /* register */
13233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13234 switch (pVCpu->iem.s.enmEffOpSize)
13235 {
13236 case IEMMODE_16BIT:
13237 IEM_MC_BEGIN(3, 0);
13238 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13239 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13240 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13241 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13242 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13243 IEM_MC_REF_EFLAGS(pEFlags);
13244 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13245 IEM_MC_ADVANCE_RIP();
13246 IEM_MC_END();
13247 return VINF_SUCCESS;
13248
13249 case IEMMODE_32BIT:
13250 IEM_MC_BEGIN(3, 0);
13251 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13252 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13253 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13254 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13255 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13256 IEM_MC_REF_EFLAGS(pEFlags);
13257 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13258 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
13259 IEM_MC_ADVANCE_RIP();
13260 IEM_MC_END();
13261 return VINF_SUCCESS;
13262
13263 case IEMMODE_64BIT:
13264 IEM_MC_BEGIN(3, 0);
13265 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13266 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13267 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13268 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13269 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13270 IEM_MC_REF_EFLAGS(pEFlags);
13271 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13272 IEM_MC_ADVANCE_RIP();
13273 IEM_MC_END();
13274 return VINF_SUCCESS;
13275
13276 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13277 }
13278 }
13279 else
13280 {
13281 /* memory */
13282 switch (pVCpu->iem.s.enmEffOpSize)
13283 {
13284 case IEMMODE_16BIT:
13285 IEM_MC_BEGIN(3, 2);
13286 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13287 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13288 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13289 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13290
13291 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13293 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13294 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13295 IEM_MC_FETCH_EFLAGS(EFlags);
13296 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13297
13298 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
13299 IEM_MC_COMMIT_EFLAGS(EFlags);
13300 IEM_MC_ADVANCE_RIP();
13301 IEM_MC_END();
13302 return VINF_SUCCESS;
13303
13304 case IEMMODE_32BIT:
13305 IEM_MC_BEGIN(3, 2);
13306 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13307 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13308 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13309 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13310
13311 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13313 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13314 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13315 IEM_MC_FETCH_EFLAGS(EFlags);
13316 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13317
13318 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
13319 IEM_MC_COMMIT_EFLAGS(EFlags);
13320 IEM_MC_ADVANCE_RIP();
13321 IEM_MC_END();
13322 return VINF_SUCCESS;
13323
13324 case IEMMODE_64BIT:
13325 IEM_MC_BEGIN(3, 2);
13326 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13327 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13328 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13329 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13330
13331 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13333 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13334 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13335 IEM_MC_FETCH_EFLAGS(EFlags);
13336 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13337
13338 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
13339 IEM_MC_COMMIT_EFLAGS(EFlags);
13340 IEM_MC_ADVANCE_RIP();
13341 IEM_MC_END();
13342 return VINF_SUCCESS;
13343
13344 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13345 }
13346 }
13347}
13348
13349/** Opcode 0xd4. */
13350FNIEMOP_DEF(iemOp_aam_Ib)
13351{
13352 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
13353 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
13354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13355 IEMOP_HLP_NO_64BIT();
13356 if (!bImm)
13357 return IEMOP_RAISE_DIVIDE_ERROR();
13358 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
13359}
13360
13361
13362/** Opcode 0xd5. */
13363FNIEMOP_DEF(iemOp_aad_Ib)
13364{
13365 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
13366 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
13367 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13368 IEMOP_HLP_NO_64BIT();
13369 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
13370}
13371
13372
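#if 0 /* Illustrative only: the core arithmetic of the deferred iemCImpl_aam
       * and iemCImpl_aad with an arbitrary immediate base (normally 10);
       * flag updates omitted.  Note the !bImm check above: aam with a zero
       * base must raise #DE. */
static void aamSketch(uint8_t *pbAl, uint8_t *pbAh, uint8_t bBase)
{
    *pbAh = *pbAl / bBase;                      /* split AL into base-bBase digits */
    *pbAl = *pbAl % bBase;
}
static void aadSketch(uint8_t *pbAl, uint8_t *pbAh, uint8_t bBase)
{
    *pbAl = (uint8_t)(*pbAl + *pbAh * bBase);   /* recombine the digits into AL */
    *pbAh = 0;
}
#endif
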
13373/** Opcode 0xd6. */
13374FNIEMOP_DEF(iemOp_salc)
13375{
13376 IEMOP_MNEMONIC(salc, "salc");
13377     IEMOP_HLP_MIN_286(); /* (undocumented at the time) */
13379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13380 IEMOP_HLP_NO_64BIT();
13381
13382 IEM_MC_BEGIN(0, 0);
13383 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
13384 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
13385 } IEM_MC_ELSE() {
13386 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
13387 } IEM_MC_ENDIF();
13388 IEM_MC_ADVANCE_RIP();
13389 IEM_MC_END();
13390 return VINF_SUCCESS;
13391}
13392
13393
13394/** Opcode 0xd7. */
13395FNIEMOP_DEF(iemOp_xlat)
13396{
13397 IEMOP_MNEMONIC(xlat, "xlat");
13398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13399 switch (pVCpu->iem.s.enmEffAddrMode)
13400 {
13401 case IEMMODE_16BIT:
13402 IEM_MC_BEGIN(2, 0);
13403 IEM_MC_LOCAL(uint8_t, u8Tmp);
13404 IEM_MC_LOCAL(uint16_t, u16Addr);
13405 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
13406 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
13407 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
13408 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
13409 IEM_MC_ADVANCE_RIP();
13410 IEM_MC_END();
13411 return VINF_SUCCESS;
13412
13413 case IEMMODE_32BIT:
13414 IEM_MC_BEGIN(2, 0);
13415 IEM_MC_LOCAL(uint8_t, u8Tmp);
13416 IEM_MC_LOCAL(uint32_t, u32Addr);
13417 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
13418 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
13419 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
13420 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
13421 IEM_MC_ADVANCE_RIP();
13422 IEM_MC_END();
13423 return VINF_SUCCESS;
13424
13425 case IEMMODE_64BIT:
13426 IEM_MC_BEGIN(2, 0);
13427 IEM_MC_LOCAL(uint8_t, u8Tmp);
13428 IEM_MC_LOCAL(uint64_t, u64Addr);
13429 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
13430 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
13431 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
13432 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
13433 IEM_MC_ADVANCE_RIP();
13434 IEM_MC_END();
13435 return VINF_SUCCESS;
13436
13437 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13438 }
13439}
13440
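#if 0 /* Illustrative only: behind the three address-size cases above, xlat
       * is just a one-byte table lookup with a zero-extended AL index: */
static uint8_t xlatSketch(uint8_t const *pbTable /* seg:rBX */, uint8_t bAl)
{
    return pbTable[bAl];    /* AL = [rBX + zero-extended AL] */
}
#endif
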
13441
13442/**
13443 * Common worker for FPU instructions working on ST0 and STn, and storing the
13444 * result in ST0.
13445 *
13446 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13447 */
13448FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
13449{
13450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13451
13452 IEM_MC_BEGIN(3, 1);
13453 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13454 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13455 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13456 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13457
13458 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13459 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13460 IEM_MC_PREPARE_FPU_USAGE();
13461 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13462 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
13463 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13464 IEM_MC_ELSE()
13465 IEM_MC_FPU_STACK_UNDERFLOW(0);
13466 IEM_MC_ENDIF();
13467 IEM_MC_ADVANCE_RIP();
13468
13469 IEM_MC_END();
13470 return VINF_SUCCESS;
13471}
13472
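#if 0 /* Illustrative only: for valid operands the worker above boils down
       * to a single 80-bit operation, e.g. for fadd st0,stN (using the C
       * long double as a stand-in for RTFLOAT80U): */
static void faddSt0StNSketch(long double *paSt /* ST0 first */, unsigned iStN)
{
    paSt[0] += paSt[iStN];  /* the result replaces ST0 */
}
#endif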
13473
13474/**
13475 * Common worker for FPU instructions working on ST0 and STn, and only affecting
13476 * flags.
13477 *
13478 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13479 */
13480FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
13481{
13482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13483
13484 IEM_MC_BEGIN(3, 1);
13485 IEM_MC_LOCAL(uint16_t, u16Fsw);
13486 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13487 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13488 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13489
13490 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13491 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13492 IEM_MC_PREPARE_FPU_USAGE();
13493 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13494 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
13495 IEM_MC_UPDATE_FSW(u16Fsw);
13496 IEM_MC_ELSE()
13497 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
13498 IEM_MC_ENDIF();
13499 IEM_MC_ADVANCE_RIP();
13500
13501 IEM_MC_END();
13502 return VINF_SUCCESS;
13503}
13504
13505
13506/**
13507 * Common worker for FPU instructions working on ST0 and STn, only affecting
13508 * flags, and popping when done.
13509 *
13510 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13511 */
13512FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
13513{
13514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13515
13516 IEM_MC_BEGIN(3, 1);
13517 IEM_MC_LOCAL(uint16_t, u16Fsw);
13518 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13519 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13520 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13521
13522 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13523 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13524 IEM_MC_PREPARE_FPU_USAGE();
13525 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13526 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
13527 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
13528 IEM_MC_ELSE()
13529 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
13530 IEM_MC_ENDIF();
13531 IEM_MC_ADVANCE_RIP();
13532
13533 IEM_MC_END();
13534 return VINF_SUCCESS;
13535}
13536
13537
13538/** Opcode 0xd8 11/0. */
13539FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
13540{
13541 IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
13542 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
13543}
13544
13545
13546/** Opcode 0xd8 11/1. */
13547FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
13548{
13549 IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
13550 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
13551}
13552
13553
13554/** Opcode 0xd8 11/2. */
13555FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
13556{
13557 IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
13558 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
13559}
13560
13561
13562/** Opcode 0xd8 11/3. */
13563FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
13564{
13565 IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
13566 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
13567}
13568
13569
13570/** Opcode 0xd8 11/4. */
13571FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
13572{
13573 IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
13574 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
13575}
13576
13577
13578/** Opcode 0xd8 11/5. */
13579FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
13580{
13581 IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
13582 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
13583}
13584
13585
13586/** Opcode 0xd8 11/6. */
13587FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
13588{
13589 IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
13590 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
13591}
13592
13593
13594/** Opcode 0xd8 11/7. */
13595FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
13596{
13597 IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
13598 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
13599}
13600
13601
13602/**
13603 * Common worker for FPU instructions working on ST0 and an m32r, and storing
13604 * the result in ST0.
13605 *
13606 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13607 */
13608FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
13609{
13610 IEM_MC_BEGIN(3, 3);
13611 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13612 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13613 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
13614 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13615 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13616 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
13617
13618 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13620
13621 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13622 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13623 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13624
13625 IEM_MC_PREPARE_FPU_USAGE();
13626 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
13627 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
13628 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13629 IEM_MC_ELSE()
13630 IEM_MC_FPU_STACK_UNDERFLOW(0);
13631 IEM_MC_ENDIF();
13632 IEM_MC_ADVANCE_RIP();
13633
13634 IEM_MC_END();
13635 return VINF_SUCCESS;
13636}
13637
13638
13639/** Opcode 0xd8 !11/0. */
13640FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
13641{
13642 IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
13643 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
13644}
13645
13646
13647/** Opcode 0xd8 !11/1. */
13648FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
13649{
13650 IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
13651 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
13652}
13653
13654
13655/** Opcode 0xd8 !11/2. */
13656FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
13657{
13658 IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");
13659
13660 IEM_MC_BEGIN(3, 3);
13661 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13662 IEM_MC_LOCAL(uint16_t, u16Fsw);
13663 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
13664 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13665 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13666 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
13667
13668 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13670
13671 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13672 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13673 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13674
13675 IEM_MC_PREPARE_FPU_USAGE();
13676 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
13677 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
13678 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13679 IEM_MC_ELSE()
13680 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13681 IEM_MC_ENDIF();
13682 IEM_MC_ADVANCE_RIP();
13683
13684 IEM_MC_END();
13685 return VINF_SUCCESS;
13686}
13687
13688
13689/** Opcode 0xd8 !11/3. */
13690FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
13691{
13692 IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");
13693
13694 IEM_MC_BEGIN(3, 3);
13695 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13696 IEM_MC_LOCAL(uint16_t, u16Fsw);
13697 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
13698 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13699 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13700 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
13701
13702 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13704
13705 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13706 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13707 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13708
13709 IEM_MC_PREPARE_FPU_USAGE();
13710 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
13711 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
13712 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13713 IEM_MC_ELSE()
13714 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13715 IEM_MC_ENDIF();
13716 IEM_MC_ADVANCE_RIP();
13717
13718 IEM_MC_END();
13719 return VINF_SUCCESS;
13720}
13721
13722
13723/** Opcode 0xd8 !11/4. */
13724FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
13725{
13726 IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
13727 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
13728}
13729
13730
13731/** Opcode 0xd8 !11/5. */
13732FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
13733{
13734 IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
13735 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
13736}
13737
13738
13739/** Opcode 0xd8 !11/6. */
13740FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
13741{
13742 IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
13743 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
13744}
13745
13746
13747/** Opcode 0xd8 !11/7. */
13748FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
13749{
13750 IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
13751 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
13752}
13753
13754
13755/** Opcode 0xd8. */
13756FNIEMOP_DEF(iemOp_EscF0)
13757{
13758 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13759 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
13760
13761 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13762 {
13763 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13764 {
13765 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
13766 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
13767 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
13768 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
13769 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
13770 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
13771 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
13772 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
13773 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13774 }
13775 }
13776 else
13777 {
13778 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13779 {
13780 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
13781 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
13782 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
13783 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
13784 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
13785 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
13786 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
13787 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
13788 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13789 }
13790 }
13791}
13792
13793
13794/** Opcode 0xd9 /0 mem32real
13795 * @sa iemOp_fld_m64r */
13796FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
13797{
13798 IEMOP_MNEMONIC(fld_m32r, "fld m32r");
13799
13800 IEM_MC_BEGIN(2, 3);
13801 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13802 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13803 IEM_MC_LOCAL(RTFLOAT32U, r32Val);
13804 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13805 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);
13806
13807 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13809
13810 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13811 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13812 IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13813
13814 IEM_MC_PREPARE_FPU_USAGE();
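 /* ST(7) is the register that becomes ST(0) after the push; if it isn't empty, the push would overflow the stack. */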
13815 IEM_MC_IF_FPUREG_IS_EMPTY(7)
13816 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
13817 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13818 IEM_MC_ELSE()
13819 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13820 IEM_MC_ENDIF();
13821 IEM_MC_ADVANCE_RIP();
13822
13823 IEM_MC_END();
13824 return VINF_SUCCESS;
13825}
13826
13827
13828/** Opcode 0xd9 !11/2 mem32real */
13829FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
13830{
13831 IEMOP_MNEMONIC(fst_m32r, "fst m32r");
13832 IEM_MC_BEGIN(3, 2);
13833 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13834 IEM_MC_LOCAL(uint16_t, u16Fsw);
13835 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13836 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
13837 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
13838
13839 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13841 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13842 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13843
13844 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
13845 IEM_MC_PREPARE_FPU_USAGE();
13846 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13847 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
13848 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
13849 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13850 IEM_MC_ELSE()
13851 IEM_MC_IF_FCW_IM()
13852 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
13853 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
13854 IEM_MC_ENDIF();
13855 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13856 IEM_MC_ENDIF();
13857 IEM_MC_ADVANCE_RIP();
13858
13859 IEM_MC_END();
13860 return VINF_SUCCESS;
13861}
13862
13863
13864/** Opcode 0xd9 !11/3 */
13865FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
13866{
13867 IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
13868 IEM_MC_BEGIN(3, 2);
13869 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13870 IEM_MC_LOCAL(uint16_t, u16Fsw);
13871 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13872 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
13873 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
13874
13875 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13876 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13877 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13878 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13879
13880 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
13881 IEM_MC_PREPARE_FPU_USAGE();
13882 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13883 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
13884 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
13885 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13886 IEM_MC_ELSE()
13887 IEM_MC_IF_FCW_IM()
13888 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
13889 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
13890 IEM_MC_ENDIF();
13891 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13892 IEM_MC_ENDIF();
13893 IEM_MC_ADVANCE_RIP();
13894
13895 IEM_MC_END();
13896 return VINF_SUCCESS;
13897}
13898
13899
13900/** Opcode 0xd9 !11/4 */
13901FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
13902{
13903 IEMOP_MNEMONIC(fldenv, "fldenv m14/m28byte");
13904 IEM_MC_BEGIN(3, 0);
13905 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
13906 IEM_MC_ARG(uint8_t, iEffSeg, 1);
13907 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
13908 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13910 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13911 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13912 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
13913 IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
13914 IEM_MC_END();
13915 return VINF_SUCCESS;
13916}
13917
13918
13919/** Opcode 0xd9 !11/5 */
13920FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
13921{
13922 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
13923 IEM_MC_BEGIN(1, 1);
13924 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13925 IEM_MC_ARG(uint16_t, u16Fcw, 0);
13926 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13928 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13929 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13930 IEM_MC_FETCH_MEM_U16(u16Fcw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13931 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fcw);
13932 IEM_MC_END();
13933 return VINF_SUCCESS;
13934}
13935
13936
13937/** Opcode 0xd9 !11/6 */
13938FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
13939{
13940 IEMOP_MNEMONIC(fnstenv, "fnstenv m14/m28byte");
13941 IEM_MC_BEGIN(3, 0);
13942 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
13943 IEM_MC_ARG(uint8_t, iEffSeg, 1);
13944 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
13945 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13946 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13947 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13948 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
13949 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
13950 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
13951 IEM_MC_END();
13952 return VINF_SUCCESS;
13953}
13954
13955
13956/** Opcode 0xd9 !11/7 */
13957FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
13958{
13959 IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
13960 IEM_MC_BEGIN(2, 0);
13961 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13962 IEM_MC_LOCAL(uint16_t, u16Fcw);
13963 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13965 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13966 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
13967 IEM_MC_FETCH_FCW(u16Fcw);
13968 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
13969 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
13970 IEM_MC_END();
13971 return VINF_SUCCESS;
13972}
13973
13974
13975/** Opcode 0xd9 0xd0. */
13976FNIEMOP_DEF(iemOp_fnop)
13977{
13978 IEMOP_MNEMONIC(fnop, "fnop");
13979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13980
13981 IEM_MC_BEGIN(0, 0);
13982 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13983 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13984 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13985 /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
13986 * an Intel optimization. Investigate. */
13987 IEM_MC_UPDATE_FPU_OPCODE_IP();
13988 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
13989 IEM_MC_END();
13990 return VINF_SUCCESS;
13991}
13992
13993
13994/** Opcode 0xd9 11/0 stN */
13995FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
13996{
13997 IEMOP_MNEMONIC(fld_stN, "fld stN");
13998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13999
14000 /** @todo Testcase: Check whether this raises \#MF. Intel's docs don't mention
14001 * it, but AMD's indicate that it does. */
14002 IEM_MC_BEGIN(0, 2);
14003 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
14004 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14005 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14006 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14007
14008 IEM_MC_PREPARE_FPU_USAGE();
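 /* The source ST(i) is resolved against the current TOP before the push, so 'fld st0' duplicates the stack top. */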
14009 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
14010 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
14011 IEM_MC_PUSH_FPU_RESULT(FpuRes);
14012 IEM_MC_ELSE()
14013 IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
14014 IEM_MC_ENDIF();
14015
14016 IEM_MC_ADVANCE_RIP();
14017 IEM_MC_END();
14018
14019 return VINF_SUCCESS;
14020}
14021
14022
14023/** Opcode 0xd9 11/1 stN */
14024FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
14025{
14026 IEMOP_MNEMONIC(fxch_stN, "fxch stN");
14027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14028
14029 /** @todo Testcase: Check whether this raises \#MF. Intel's docs don't mention
14030 * it, but AMD's indicate that it does. */
14031 IEM_MC_BEGIN(1, 3);
14032 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
14033 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
14034 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14035 IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
14036 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14037 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14038
14039 IEM_MC_PREPARE_FPU_USAGE();
14040 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14041 IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
14042 IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
14043 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14044 IEM_MC_ELSE()
14045 IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
14046 IEM_MC_ENDIF();
14047
14048 IEM_MC_ADVANCE_RIP();
14049 IEM_MC_END();
14050
14051 return VINF_SUCCESS;
14052}
14053
14054
14055/** Opcode 0xd9 11/3, 0xdd 11/3. */
14056FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
14057{
14058 IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
14059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14060
14061 /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
14062 uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
14063 if (!iDstReg)
14064 {
14065 IEM_MC_BEGIN(0, 1);
14066 IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
14067 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14068 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14069
14070 IEM_MC_PREPARE_FPU_USAGE();
14071 IEM_MC_IF_FPUREG_NOT_EMPTY(0)
14072 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
14073 IEM_MC_ELSE()
14074 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
14075 IEM_MC_ENDIF();
14076
14077 IEM_MC_ADVANCE_RIP();
14078 IEM_MC_END();
14079 }
14080 else
14081 {
14082 IEM_MC_BEGIN(0, 2);
14083 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
14084 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14085 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14086 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14087
14088 IEM_MC_PREPARE_FPU_USAGE();
14089 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14090 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
14091 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
14092 IEM_MC_ELSE()
14093 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
14094 IEM_MC_ENDIF();
14095
14096 IEM_MC_ADVANCE_RIP();
14097 IEM_MC_END();
14098 }
14099 return VINF_SUCCESS;
14100}
14101
14102
14103/**
14104 * Common worker for FPU instructions working on ST0, replacing it with the
14105 * result, i.e. unary operators.
14106 *
14107 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14108 */
14109FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
14110{
14111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14112
14113 IEM_MC_BEGIN(2, 1);
14114 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14115 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14116 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14117
14118 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14119 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14120 IEM_MC_PREPARE_FPU_USAGE();
14121 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14122 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
14123 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14124 IEM_MC_ELSE()
14125 IEM_MC_FPU_STACK_UNDERFLOW(0);
14126 IEM_MC_ENDIF();
14127 IEM_MC_ADVANCE_RIP();
14128
14129 IEM_MC_END();
14130 return VINF_SUCCESS;
14131}
14132
14133
14134/** Opcode 0xd9 0xe0. */
14135FNIEMOP_DEF(iemOp_fchs)
14136{
14137 IEMOP_MNEMONIC(fchs_st0, "fchs st0");
14138 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
14139}
14140
14141
14142/** Opcode 0xd9 0xe1. */
14143FNIEMOP_DEF(iemOp_fabs)
14144{
14145 IEMOP_MNEMONIC(fabs_st0, "fabs st0");
14146 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
14147}
14148
14149
14150/**
14151 * Common worker for FPU instructions working on ST0 and only returns FSW.
14152 *
14153 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14154 */
14155FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
14156{
14157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14158
14159 IEM_MC_BEGIN(2, 1);
14160 IEM_MC_LOCAL(uint16_t, u16Fsw);
14161 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14162 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14163
14164 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14165 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14166 IEM_MC_PREPARE_FPU_USAGE();
14167 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14168 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
14169 IEM_MC_UPDATE_FSW(u16Fsw);
14170 IEM_MC_ELSE()
14171 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
14172 IEM_MC_ENDIF();
14173 IEM_MC_ADVANCE_RIP();
14174
14175 IEM_MC_END();
14176 return VINF_SUCCESS;
14177}
14178
14179
14180/** Opcode 0xd9 0xe4. */
14181FNIEMOP_DEF(iemOp_ftst)
14182{
14183 IEMOP_MNEMONIC(ftst_st0, "ftst st0");
14184 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
14185}
14186
14187
14188/** Opcode 0xd9 0xe5. */
14189FNIEMOP_DEF(iemOp_fxam)
14190{
14191 IEMOP_MNEMONIC(fxam_st0, "fxam st0");
14192 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
14193}
14194
14195
14196/**
14197 * Common worker for FPU instructions pushing a constant onto the FPU stack.
14198 *
14199 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14200 */
14201FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
14202{
14203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14204
14205 IEM_MC_BEGIN(1, 1);
14206 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14207 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14208
14209 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14210 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14211 IEM_MC_PREPARE_FPU_USAGE();
14212 IEM_MC_IF_FPUREG_IS_EMPTY(7)
14213 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
14214 IEM_MC_PUSH_FPU_RESULT(FpuRes);
14215 IEM_MC_ELSE()
14216 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
14217 IEM_MC_ENDIF();
14218 IEM_MC_ADVANCE_RIP();
14219
14220 IEM_MC_END();
14221 return VINF_SUCCESS;
14222}
14223
14224
14225/** Opcode 0xd9 0xe8. */
14226FNIEMOP_DEF(iemOp_fld1)
14227{
14228 IEMOP_MNEMONIC(fld1, "fld1");
14229 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
14230}
14231
14232
14233/** Opcode 0xd9 0xe9. */
14234FNIEMOP_DEF(iemOp_fldl2t)
14235{
14236 IEMOP_MNEMONIC(fldl2t, "fldl2t");
14237 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
14238}
14239
14240
14241/** Opcode 0xd9 0xea. */
14242FNIEMOP_DEF(iemOp_fldl2e)
14243{
14244 IEMOP_MNEMONIC(fldl2e, "fldl2e");
14245 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
14246}
14247
14248/** Opcode 0xd9 0xeb. */
14249FNIEMOP_DEF(iemOp_fldpi)
14250{
14251 IEMOP_MNEMONIC(fldpi, "fldpi");
14252 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
14253}
14254
14255
14256/** Opcode 0xd9 0xec. */
14257FNIEMOP_DEF(iemOp_fldlg2)
14258{
14259 IEMOP_MNEMONIC(fldlg2, "fldlg2");
14260 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
14261}
14262
14263/** Opcode 0xd9 0xed. */
14264FNIEMOP_DEF(iemOp_fldln2)
14265{
14266 IEMOP_MNEMONIC(fldln2, "fldln2");
14267 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
14268}
14269
14270
14271/** Opcode 0xd9 0xee. */
14272FNIEMOP_DEF(iemOp_fldz)
14273{
14274 IEMOP_MNEMONIC(fldz, "fldz");
14275 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
14276}
14277
14278
14279/** Opcode 0xd9 0xf0. */
14280FNIEMOP_DEF(iemOp_f2xm1)
14281{
14282 IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
14283 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
14284}
14285
14286
14287/**
14288 * Common worker for FPU instructions working on STn and ST0, storing the result
14289 * in STn, and popping the stack unless IE, DE or ZE was raised.
14290 *
14291 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14292 */
14293FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
14294{
14295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14296
14297 IEM_MC_BEGIN(3, 1);
14298 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14299 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14300 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14301 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14302
14303 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14304 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14305
14306 IEM_MC_PREPARE_FPU_USAGE();
14307 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
14308 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
14309 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
14310 IEM_MC_ELSE()
14311 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
14312 IEM_MC_ENDIF();
14313 IEM_MC_ADVANCE_RIP();
14314
14315 IEM_MC_END();
14316 return VINF_SUCCESS;
14317}
14318
14319
14320/** Opcode 0xd9 0xf1. */
14321FNIEMOP_DEF(iemOp_fyl2x)
14322{
14323 IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
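 /* The literal 1 stands in for the ModR/M byte here; its rm field selects ST(1) as the destination. */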
14324 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
14325}
14326
14327
14328/**
14329 * Common worker for FPU instructions working on ST0 and having two outputs, one
14330 * replacing ST0 and one pushed onto the stack.
14331 *
14332 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14333 */
14334FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
14335{
14336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14337
14338 IEM_MC_BEGIN(2, 1);
14339 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
14340 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
14341 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14342
14343 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14344 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14345 IEM_MC_PREPARE_FPU_USAGE();
14346 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14347 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
14348 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
14349 IEM_MC_ELSE()
14350 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
14351 IEM_MC_ENDIF();
14352 IEM_MC_ADVANCE_RIP();
14353
14354 IEM_MC_END();
14355 return VINF_SUCCESS;
14356}
14357
14358
14359/** Opcode 0xd9 0xf2. */
14360FNIEMOP_DEF(iemOp_fptan)
14361{
14362 IEMOP_MNEMONIC(fptan_st0, "fptan st0");
14363 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
14364}
14365
14366
14367/** Opcode 0xd9 0xf3. */
14368FNIEMOP_DEF(iemOp_fpatan)
14369{
14370 IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
14371 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
14372}
14373
14374
14375/** Opcode 0xd9 0xf4. */
14376FNIEMOP_DEF(iemOp_fxtract)
14377{
14378 IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
14379 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
14380}
14381
14382
14383/** Opcode 0xd9 0xf5. */
14384FNIEMOP_DEF(iemOp_fprem1)
14385{
14386 IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
14387 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
14388}
14389
14390
14391/** Opcode 0xd9 0xf6. */
14392FNIEMOP_DEF(iemOp_fdecstp)
14393{
14394 IEMOP_MNEMONIC(fdecstp, "fdecstp");
14395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14396 /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
14397 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
14398 * FINCSTP and FDECSTP. */
14399
14400 IEM_MC_BEGIN(0,0);
14401
14402 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14403 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14404
14405 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14406 IEM_MC_FPU_STACK_DEC_TOP();
14407 IEM_MC_UPDATE_FSW_CONST(0);
14408
14409 IEM_MC_ADVANCE_RIP();
14410 IEM_MC_END();
14411 return VINF_SUCCESS;
14412}
14413
14414
14415/** Opcode 0xd9 0xf7. */
14416FNIEMOP_DEF(iemOp_fincstp)
14417{
14418 IEMOP_MNEMONIC(fincstp, "fincstp");
14419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14420 /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
14421 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
14422 * FINCSTP and FDECSTP. */
14423
14424 IEM_MC_BEGIN(0,0);
14425
14426 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14427 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14428
14429 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14430 IEM_MC_FPU_STACK_INC_TOP();
14431 IEM_MC_UPDATE_FSW_CONST(0);
14432
14433 IEM_MC_ADVANCE_RIP();
14434 IEM_MC_END();
14435 return VINF_SUCCESS;
14436}
14437
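/*
 * Neither FINCSTP nor FDECSTP moves any data; they only add or subtract one,
 * modulo eight, from the 3-bit TOP field in FSW, rotating the view of the
 * register stack without touching register contents or tags.
 */
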
14438
14439/** Opcode 0xd9 0xf8. */
14440FNIEMOP_DEF(iemOp_fprem)
14441{
14442 IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
14443 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
14444}
14445
14446
14447/** Opcode 0xd9 0xf9. */
14448FNIEMOP_DEF(iemOp_fyl2xp1)
14449{
14450 IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
14451 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
14452}
14453
14454
14455/** Opcode 0xd9 0xfa. */
14456FNIEMOP_DEF(iemOp_fsqrt)
14457{
14458 IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
14459 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
14460}
14461
14462
14463/** Opcode 0xd9 0xfb. */
14464FNIEMOP_DEF(iemOp_fsincos)
14465{
14466 IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
14467 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
14468}
14469
14470
14471/** Opcode 0xd9 0xfc. */
14472FNIEMOP_DEF(iemOp_frndint)
14473{
14474 IEMOP_MNEMONIC(frndint_st0, "frndint st0");
14475 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
14476}
14477
14478
14479/** Opcode 0xd9 0xfd. */
14480FNIEMOP_DEF(iemOp_fscale)
14481{
14482 IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
14483 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
14484}
14485
14486
14487/** Opcode 0xd9 0xfe. */
14488FNIEMOP_DEF(iemOp_fsin)
14489{
14490 IEMOP_MNEMONIC(fsin_st0, "fsin st0");
14491 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
14492}
14493
14494
14495/** Opcode 0xd9 0xff. */
14496FNIEMOP_DEF(iemOp_fcos)
14497{
14498 IEMOP_MNEMONIC(fcos_st0, "fcos st0");
14499 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
14500}
14501
14502
14503/** Used by iemOp_EscF1. */
14504IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
14505{
14506 /* 0xe0 */ iemOp_fchs,
14507 /* 0xe1 */ iemOp_fabs,
14508 /* 0xe2 */ iemOp_Invalid,
14509 /* 0xe3 */ iemOp_Invalid,
14510 /* 0xe4 */ iemOp_ftst,
14511 /* 0xe5 */ iemOp_fxam,
14512 /* 0xe6 */ iemOp_Invalid,
14513 /* 0xe7 */ iemOp_Invalid,
14514 /* 0xe8 */ iemOp_fld1,
14515 /* 0xe9 */ iemOp_fldl2t,
14516 /* 0xea */ iemOp_fldl2e,
14517 /* 0xeb */ iemOp_fldpi,
14518 /* 0xec */ iemOp_fldlg2,
14519 /* 0xed */ iemOp_fldln2,
14520 /* 0xee */ iemOp_fldz,
14521 /* 0xef */ iemOp_Invalid,
14522 /* 0xf0 */ iemOp_f2xm1,
14523 /* 0xf1 */ iemOp_fyl2x,
14524 /* 0xf2 */ iemOp_fptan,
14525 /* 0xf3 */ iemOp_fpatan,
14526 /* 0xf4 */ iemOp_fxtract,
14527 /* 0xf5 */ iemOp_fprem1,
14528 /* 0xf6 */ iemOp_fdecstp,
14529 /* 0xf7 */ iemOp_fincstp,
14530 /* 0xf8 */ iemOp_fprem,
14531 /* 0xf9 */ iemOp_fyl2xp1,
14532 /* 0xfa */ iemOp_fsqrt,
14533 /* 0xfb */ iemOp_fsincos,
14534 /* 0xfc */ iemOp_frndint,
14535 /* 0xfd */ iemOp_fscale,
14536 /* 0xfe */ iemOp_fsin,
14537 /* 0xff */ iemOp_fcos
14538};
14539
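/*
 * The table is indexed by bRm - 0xe0, mapping the 32 register-form encodings
 * 0xd9 0xe0 thru 0xd9 0xff straight to their handlers; holes in the encoding
 * space point at iemOp_Invalid.
 */
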
14540
14541/** Opcode 0xd9. */
14542FNIEMOP_DEF(iemOp_EscF1)
14543{
14544 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14545 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
14546
14547 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14548 {
14549 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14550 {
14551 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
14552 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
14553 case 2:
14554 if (bRm == 0xd0)
14555 return FNIEMOP_CALL(iemOp_fnop);
14556 return IEMOP_RAISE_INVALID_OPCODE();
14557 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
14558 case 4:
14559 case 5:
14560 case 6:
14561 case 7:
14562 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
14563 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
14564 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14565 }
14566 }
14567 else
14568 {
14569 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14570 {
14571 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
14572 case 1: return IEMOP_RAISE_INVALID_OPCODE();
14573 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
14574 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
14575 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
14576 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
14577 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
14578 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
14579 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14580 }
14581 }
14582}
14583
14584
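/*
 * The FCMOVcc forms below (0xda 11/0 thru 11/3) test the integer status
 * flags in EFLAGS: FCMOVB tests CF, FCMOVE tests ZF, FCMOVBE tests CF | ZF,
 * and FCMOVU tests PF (unordered). The negated forms live at 0xdb 11/0..3.
 */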
14585/** Opcode 0xda 11/0. */
14586FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
14587{
14588 IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
14589 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14590
14591 IEM_MC_BEGIN(0, 1);
14592 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14593
14594 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14595 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14596
14597 IEM_MC_PREPARE_FPU_USAGE();
14598 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14599 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
14600 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14601 IEM_MC_ENDIF();
14602 IEM_MC_UPDATE_FPU_OPCODE_IP();
14603 IEM_MC_ELSE()
14604 IEM_MC_FPU_STACK_UNDERFLOW(0);
14605 IEM_MC_ENDIF();
14606 IEM_MC_ADVANCE_RIP();
14607
14608 IEM_MC_END();
14609 return VINF_SUCCESS;
14610}
14611
14612
14613/** Opcode 0xda 11/1. */
14614FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
14615{
14616 IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
14617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14618
14619 IEM_MC_BEGIN(0, 1);
14620 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14621
14622 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14623 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14624
14625 IEM_MC_PREPARE_FPU_USAGE();
14626 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14627 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
14628 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14629 IEM_MC_ENDIF();
14630 IEM_MC_UPDATE_FPU_OPCODE_IP();
14631 IEM_MC_ELSE()
14632 IEM_MC_FPU_STACK_UNDERFLOW(0);
14633 IEM_MC_ENDIF();
14634 IEM_MC_ADVANCE_RIP();
14635
14636 IEM_MC_END();
14637 return VINF_SUCCESS;
14638}
14639
14640
14641/** Opcode 0xda 11/2. */
14642FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
14643{
14644 IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
14645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14646
14647 IEM_MC_BEGIN(0, 1);
14648 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14649
14650 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14651 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14652
14653 IEM_MC_PREPARE_FPU_USAGE();
14654 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14655 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
14656 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14657 IEM_MC_ENDIF();
14658 IEM_MC_UPDATE_FPU_OPCODE_IP();
14659 IEM_MC_ELSE()
14660 IEM_MC_FPU_STACK_UNDERFLOW(0);
14661 IEM_MC_ENDIF();
14662 IEM_MC_ADVANCE_RIP();
14663
14664 IEM_MC_END();
14665 return VINF_SUCCESS;
14666}
14667
14668
14669/** Opcode 0xda 11/3. */
14670FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
14671{
14672 IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
14673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14674
14675 IEM_MC_BEGIN(0, 1);
14676 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14677
14678 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14679 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14680
14681 IEM_MC_PREPARE_FPU_USAGE();
14682 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14683 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
14684 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14685 IEM_MC_ENDIF();
14686 IEM_MC_UPDATE_FPU_OPCODE_IP();
14687 IEM_MC_ELSE()
14688 IEM_MC_FPU_STACK_UNDERFLOW(0);
14689 IEM_MC_ENDIF();
14690 IEM_MC_ADVANCE_RIP();
14691
14692 IEM_MC_END();
14693 return VINF_SUCCESS;
14694}
14695
14696
14697/**
14698 * Common worker for FPU instructions working on ST0 and ST1, only affecting
14699 * flags, and popping twice when done.
14700 *
14701 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14702 */
14703FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14704{
14705 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14706
14707 IEM_MC_BEGIN(3, 1);
14708 IEM_MC_LOCAL(uint16_t, u16Fsw);
14709 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14710 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14711 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14712
14713 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14714 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14715
14716 IEM_MC_PREPARE_FPU_USAGE();
14717 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
14718 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14719 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
14720 IEM_MC_ELSE()
14721 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
14722 IEM_MC_ENDIF();
14723 IEM_MC_ADVANCE_RIP();
14724
14725 IEM_MC_END();
14726 return VINF_SUCCESS;
14727}
14728
14729
14730/** Opcode 0xda 0xe9. */
14731FNIEMOP_DEF(iemOp_fucompp)
14732{
14733 IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
14734 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
14735}
14736
14737
14738/**
14739 * Common worker for FPU instructions working on ST0 and an m32i, and storing
14740 * the result in ST0.
14741 *
14742 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14743 */
14744FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
14745{
14746 IEM_MC_BEGIN(3, 3);
14747 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14748 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14749 IEM_MC_LOCAL(int32_t, i32Val2);
14750 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14751 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14752 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
14753
14754 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14756
14757 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14758 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14759 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14760
14761 IEM_MC_PREPARE_FPU_USAGE();
14762 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14763 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
14764 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14765 IEM_MC_ELSE()
14766 IEM_MC_FPU_STACK_UNDERFLOW(0);
14767 IEM_MC_ENDIF();
14768 IEM_MC_ADVANCE_RIP();
14769
14770 IEM_MC_END();
14771 return VINF_SUCCESS;
14772}
14773
14774
14775/** Opcode 0xda !11/0. */
14776FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
14777{
14778 IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
14779 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
14780}
14781
14782
14783/** Opcode 0xda !11/1. */
14784FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
14785{
14786 IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
14787 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
14788}
14789
14790
14791/** Opcode 0xda !11/2. */
14792FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
14793{
14794 IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");
14795
14796 IEM_MC_BEGIN(3, 3);
14797 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14798 IEM_MC_LOCAL(uint16_t, u16Fsw);
14799 IEM_MC_LOCAL(int32_t, i32Val2);
14800 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14801 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14802 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
14803
14804 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14806
14807 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14808 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14809 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14810
14811 IEM_MC_PREPARE_FPU_USAGE();
14812 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14813 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
14814 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14815 IEM_MC_ELSE()
14816 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14817 IEM_MC_ENDIF();
14818 IEM_MC_ADVANCE_RIP();
14819
14820 IEM_MC_END();
14821 return VINF_SUCCESS;
14822}
14823
14824
14825/** Opcode 0xda !11/3. */
14826FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
14827{
14828 IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");
14829
14830 IEM_MC_BEGIN(3, 3);
14831 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14832 IEM_MC_LOCAL(uint16_t, u16Fsw);
14833 IEM_MC_LOCAL(int32_t, i32Val2);
14834 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14835 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14836 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
14837
14838 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14840
14841 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14842 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14843 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14844
14845 IEM_MC_PREPARE_FPU_USAGE();
14846 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14847 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
14848 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14849 IEM_MC_ELSE()
14850 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14851 IEM_MC_ENDIF();
14852 IEM_MC_ADVANCE_RIP();
14853
14854 IEM_MC_END();
14855 return VINF_SUCCESS;
14856}
14857
14858
14859/** Opcode 0xda !11/4. */
14860FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
14861{
14862 IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
14863 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
14864}
14865
14866
14867/** Opcode 0xda !11/5. */
14868FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
14869{
14870 IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
14871 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
14872}
14873
14874
14875/** Opcode 0xda !11/6. */
14876FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
14877{
14878 IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
14879 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
14880}
14881
14882
14883/** Opcode 0xda !11/7. */
14884FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
14885{
14886 IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
14887 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
14888}
14889
14890
14891/** Opcode 0xda. */
14892FNIEMOP_DEF(iemOp_EscF2)
14893{
14894 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14895 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
14896 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14897 {
14898 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14899 {
14900 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
14901 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
14902 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
14903 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
14904 case 4: return IEMOP_RAISE_INVALID_OPCODE();
14905 case 5:
14906 if (bRm == 0xe9)
14907 return FNIEMOP_CALL(iemOp_fucompp);
14908 return IEMOP_RAISE_INVALID_OPCODE();
14909 case 6: return IEMOP_RAISE_INVALID_OPCODE();
14910 case 7: return IEMOP_RAISE_INVALID_OPCODE();
14911 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14912 }
14913 }
14914 else
14915 {
14916 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14917 {
14918 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
14919 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
14920 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
14921 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
14922 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
14923 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
14924 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
14925 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
14926 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14927 }
14928 }
14929}
14930
14931
14932/** Opcode 0xdb !11/0. */
14933FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
14934{
14935 IEMOP_MNEMONIC(fild_m32i, "fild m32i");
14936
14937 IEM_MC_BEGIN(2, 3);
14938 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14939 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14940 IEM_MC_LOCAL(int32_t, i32Val);
14941 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14942 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);
14943
14944 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14946
14947 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14948 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14949 IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14950
14951 IEM_MC_PREPARE_FPU_USAGE();
14952 IEM_MC_IF_FPUREG_IS_EMPTY(7)
14953 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
14954 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14955 IEM_MC_ELSE()
14956 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14957 IEM_MC_ENDIF();
14958 IEM_MC_ADVANCE_RIP();
14959
14960 IEM_MC_END();
14961 return VINF_SUCCESS;
14962}
14963
14964
14965/** Opcode 0xdb !11/1. */
14966FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
14967{
14968 IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
14969 IEM_MC_BEGIN(3, 2);
14970 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14971 IEM_MC_LOCAL(uint16_t, u16Fsw);
14972 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14973 IEM_MC_ARG(int32_t *, pi32Dst, 1);
14974 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14975
14976 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14978 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14979 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14980
14981 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
14982 IEM_MC_PREPARE_FPU_USAGE();
14983 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14984 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
14985 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14986 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14987 IEM_MC_ELSE()
14988 IEM_MC_IF_FCW_IM()
14989 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
14990 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
14991 IEM_MC_ENDIF();
14992 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14993 IEM_MC_ENDIF();
14994 IEM_MC_ADVANCE_RIP();
14995
14996 IEM_MC_END();
14997 return VINF_SUCCESS;
14998}
14999
15000
15001/** Opcode 0xdb !11/2. */
15002FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
15003{
15004 IEMOP_MNEMONIC(fist_m32i, "fist m32i");
15005 IEM_MC_BEGIN(3, 2);
15006 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15007 IEM_MC_LOCAL(uint16_t, u16Fsw);
15008 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15009 IEM_MC_ARG(int32_t *, pi32Dst, 1);
15010 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15011
15012 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15013 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15014 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15015 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15016
15017 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15018 IEM_MC_PREPARE_FPU_USAGE();
15019 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15020 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
15021 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
15022 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15023 IEM_MC_ELSE()
15024 IEM_MC_IF_FCW_IM()
15025 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
15026 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
15027 IEM_MC_ENDIF();
15028 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15029 IEM_MC_ENDIF();
15030 IEM_MC_ADVANCE_RIP();
15031
15032 IEM_MC_END();
15033 return VINF_SUCCESS;
15034}
15035
15036
15037/** Opcode 0xdb !11/3. */
15038FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
15039{
15040 IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
15041 IEM_MC_BEGIN(3, 2);
15042 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15043 IEM_MC_LOCAL(uint16_t, u16Fsw);
15044 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15045 IEM_MC_ARG(int32_t *, pi32Dst, 1);
15046 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15047
15048 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15050 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15051 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15052
15053 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15054 IEM_MC_PREPARE_FPU_USAGE();
15055 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15056 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
15057 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
15058 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15059 IEM_MC_ELSE()
15060 IEM_MC_IF_FCW_IM()
15061 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
15062 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
15063 IEM_MC_ENDIF();
15064 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15065 IEM_MC_ENDIF();
15066 IEM_MC_ADVANCE_RIP();
15067
15068 IEM_MC_END();
15069 return VINF_SUCCESS;
15070}
15071
15072
15073/** Opcode 0xdb !11/5. */
15074FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
15075{
15076 IEMOP_MNEMONIC(fld_m80r, "fld m80r");
15077
15078 IEM_MC_BEGIN(2, 3);
15079 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15080 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15081 IEM_MC_LOCAL(RTFLOAT80U, r80Val);
15082 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15083 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);
15084
15085 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15087
15088 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15089 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15090 IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15091
15092 IEM_MC_PREPARE_FPU_USAGE();
15093 IEM_MC_IF_FPUREG_IS_EMPTY(7)
15094 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
15095 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15096 IEM_MC_ELSE()
15097 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15098 IEM_MC_ENDIF();
15099 IEM_MC_ADVANCE_RIP();
15100
15101 IEM_MC_END();
15102 return VINF_SUCCESS;
15103}
15104
15105
15106/** Opcode 0xdb !11/7. */
15107FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
15108{
15109 IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
15110 IEM_MC_BEGIN(3, 2);
15111 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15112 IEM_MC_LOCAL(uint16_t, u16Fsw);
15113 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15114 IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
15115 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15116
15117 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15118 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15119 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15120 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15121
15122 IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15123 IEM_MC_PREPARE_FPU_USAGE();
15124 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15125 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
15126 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
15127 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15128 IEM_MC_ELSE()
15129 IEM_MC_IF_FCW_IM()
15130 IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
15131 IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
15132 IEM_MC_ENDIF();
15133 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15134 IEM_MC_ENDIF();
15135 IEM_MC_ADVANCE_RIP();
15136
15137 IEM_MC_END();
15138 return VINF_SUCCESS;
15139}
15140
15141
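/*
 * 0xdb 11/0 thru 11/3 are the negated counterparts of the 0xda FCMOVcc forms
 * above: FCMOVNB moves if CF is clear, FCMOVNE if ZF is clear, FCMOVNBE if
 * both CF and ZF are clear, and FCMOVNU if PF is clear.
 */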
15142/** Opcode 0xdb 11/0. */
15143FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
15144{
15145 IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
15146 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15147
15148 IEM_MC_BEGIN(0, 1);
15149 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15150
15151 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15152 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15153
15154 IEM_MC_PREPARE_FPU_USAGE();
15155 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15156 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
15157 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15158 IEM_MC_ENDIF();
15159 IEM_MC_UPDATE_FPU_OPCODE_IP();
15160 IEM_MC_ELSE()
15161 IEM_MC_FPU_STACK_UNDERFLOW(0);
15162 IEM_MC_ENDIF();
15163 IEM_MC_ADVANCE_RIP();
15164
15165 IEM_MC_END();
15166 return VINF_SUCCESS;
15167}
15168
15169
15170/** Opcode 0xdb 11/1. */
15171FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
15172{
15173 IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
15174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15175
15176 IEM_MC_BEGIN(0, 1);
15177 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15178
15179 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15180 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15181
15182 IEM_MC_PREPARE_FPU_USAGE();
15183 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15184 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
15185 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15186 IEM_MC_ENDIF();
15187 IEM_MC_UPDATE_FPU_OPCODE_IP();
15188 IEM_MC_ELSE()
15189 IEM_MC_FPU_STACK_UNDERFLOW(0);
15190 IEM_MC_ENDIF();
15191 IEM_MC_ADVANCE_RIP();
15192
15193 IEM_MC_END();
15194 return VINF_SUCCESS;
15195}
15196
15197
15198/** Opcode 0xdb 11/2. */
15199FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
15200{
15201 IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
15202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15203
15204 IEM_MC_BEGIN(0, 1);
15205 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15206
15207 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15208 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15209
15210 IEM_MC_PREPARE_FPU_USAGE();
15211 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15212 IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
15213 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15214 IEM_MC_ENDIF();
15215 IEM_MC_UPDATE_FPU_OPCODE_IP();
15216 IEM_MC_ELSE()
15217 IEM_MC_FPU_STACK_UNDERFLOW(0);
15218 IEM_MC_ENDIF();
15219 IEM_MC_ADVANCE_RIP();
15220
15221 IEM_MC_END();
15222 return VINF_SUCCESS;
15223}
15224
15225
15226/** Opcode 0xdb 11/3. */
15227FNIEMOP_DEF_1(iemOp_fcmovnu_stN, uint8_t, bRm)
15228{
15229 IEMOP_MNEMONIC(fcmovnu_st0_stN, "fcmovnu st0,stN");
15230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15231
15232 IEM_MC_BEGIN(0, 1);
15233 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15234
15235 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15236 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15237
15238 IEM_MC_PREPARE_FPU_USAGE();
15239 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15240 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
15241 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15242 IEM_MC_ENDIF();
15243 IEM_MC_UPDATE_FPU_OPCODE_IP();
15244 IEM_MC_ELSE()
15245 IEM_MC_FPU_STACK_UNDERFLOW(0);
15246 IEM_MC_ENDIF();
15247 IEM_MC_ADVANCE_RIP();
15248
15249 IEM_MC_END();
15250 return VINF_SUCCESS;
15251}
15252
15253
15254/** Opcode 0xdb 0xe0. */
15255FNIEMOP_DEF(iemOp_fneni)
15256{
15257 IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
15258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15259 IEM_MC_BEGIN(0,0);
15260 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15261 IEM_MC_ADVANCE_RIP();
15262 IEM_MC_END();
15263 return VINF_SUCCESS;
15264}
15265
15266
15267/** Opcode 0xdb 0xe1. */
15268FNIEMOP_DEF(iemOp_fndisi)
15269{
15270 IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
15271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15272 IEM_MC_BEGIN(0,0);
15273 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15274 IEM_MC_ADVANCE_RIP();
15275 IEM_MC_END();
15276 return VINF_SUCCESS;
15277}
15278
15279
15280/** Opcode 0xdb 0xe2. */
15281FNIEMOP_DEF(iemOp_fnclex)
15282{
15283 IEMOP_MNEMONIC(fnclex, "fnclex");
15284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15285
15286 IEM_MC_BEGIN(0,0);
15287 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15288 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15289 IEM_MC_CLEAR_FSW_EX();
15290 IEM_MC_ADVANCE_RIP();
15291 IEM_MC_END();
15292 return VINF_SUCCESS;
15293}
15294
15295
15296/** Opcode 0xdb 0xe3. */
15297FNIEMOP_DEF(iemOp_fninit)
15298{
15299 IEMOP_MNEMONIC(fninit, "fninit");
15300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15301 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
15302}
15303
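/*
 * FNINIT is implemented entirely in C; the 'false' argument selects the
 * no-wait form, i.e. pending FPU exceptions are not checked first. The
 * checking FINIT form is decoded as a WAIT prefix followed by this opcode.
 */
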
15304
15305/** Opcode 0xdb 0xe4. */
15306FNIEMOP_DEF(iemOp_fnsetpm)
15307{
15308 IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
15309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15310 IEM_MC_BEGIN(0,0);
15311 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15312 IEM_MC_ADVANCE_RIP();
15313 IEM_MC_END();
15314 return VINF_SUCCESS;
15315}
15316
15317
15318/** Opcode 0xdb 0xe5. */
15319FNIEMOP_DEF(iemOp_frstpm)
15320{
15321 IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
15322#if 0 /* #UDs on newer CPUs */
15323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15324 IEM_MC_BEGIN(0,0);
15325 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15326 IEM_MC_ADVANCE_RIP();
15327 IEM_MC_END();
15328 return VINF_SUCCESS;
15329#else
15330 return IEMOP_RAISE_INVALID_OPCODE();
15331#endif
15332}
15333
15334
15335/** Opcode 0xdb 11/5. */
15336FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
15337{
15338 IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
15339 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
15340}
15341
15342
15343/** Opcode 0xdb 11/6. */
15344FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
15345{
15346 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
15347 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
15348}
15349
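/* The fPop parameter caters for the popping variants, FUCOMIP and FCOMIP (0xdf 11/5 and 11/6), which use the same C worker with fPop set. */
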
15350
15351/** Opcode 0xdb. */
15352FNIEMOP_DEF(iemOp_EscF3)
15353{
15354 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15355 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
15356 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15357 {
15358 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15359 {
15360 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
15361 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
15362 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
15363 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnu_stN, bRm);
15364 case 4:
15365 switch (bRm)
15366 {
15367 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
15368 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
15369 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
15370 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
15371 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
15372 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
15373 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
15374 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
15375 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15376 }
15377 break;
15378 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
15379 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
15380 case 7: return IEMOP_RAISE_INVALID_OPCODE();
15381 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15382 }
15383 }
15384 else
15385 {
15386 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15387 {
15388 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
15389 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i, bRm);
15390 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
15391 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
15392 case 4: return IEMOP_RAISE_INVALID_OPCODE();
15393 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
15394 case 6: return IEMOP_RAISE_INVALID_OPCODE();
15395 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
15396 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15397 }
15398 }
15399}
15400
15401
15402/**
15403 * Common worker for FPU instructions working on STn and ST0, and storing the
15404 * result in STn unless IE, DE or ZE was raised.
15405 *
15406 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15407 */
15408FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
15409{
15410 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15411
15412 IEM_MC_BEGIN(3, 1);
15413 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15414 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15415 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15416 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
15417
15418 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15419 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15420
15421 IEM_MC_PREPARE_FPU_USAGE();
15422 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
15423 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
15424 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
15425 IEM_MC_ELSE()
15426 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
15427 IEM_MC_ENDIF();
15428 IEM_MC_ADVANCE_RIP();
15429
15430 IEM_MC_END();
15431 return VINF_SUCCESS;
15432}
15433
15434
15435/** Opcode 0xdc 11/0. */
15436FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
15437{
15438 IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
15439 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
15440}
15441
15442
15443/** Opcode 0xdc 11/1. */
15444FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
15445{
15446 IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
15447 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
15448}
15449
15450
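/*
 * Note the encoding quirk in the 0xdc register forms: relative to 0xd8 the
 * subtraction and division pairs are swapped, so /4 is FSUBR and /5 is FSUB
 * (and likewise /6 FDIVR, /7 FDIV). The operands are also reversed, the
 * destination being ST(i) rather than ST(0).
 */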
15451/** Opcode 0xdc 11/4. */
15452FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
15453{
15454 IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
15455 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
15456}
15457
15458
15459/** Opcode 0xdc 11/5. */
15460FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
15461{
15462 IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
15463 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
15464}
15465
15466
15467/** Opcode 0xdc 11/6. */
15468FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
15469{
15470 IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
15471 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
15472}
15473
15474
15475/** Opcode 0xdc 11/7. */
15476FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
15477{
15478 IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
15479 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
15480}
15481
15482
15483/**
15484 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
15485 * memory operand, and storing the result in ST0.
15486 *
15487 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15488 */
15489FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnAImpl)
15490{
15491 IEM_MC_BEGIN(3, 3);
15492 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15493 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15494 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
15495 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15496 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
15497 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
15498
15499 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15501 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15502 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15503
15504 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15505 IEM_MC_PREPARE_FPU_USAGE();
15506 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
15507 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Factor1, pr64Factor2);
15508 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15509 IEM_MC_ELSE()
15510 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15511 IEM_MC_ENDIF();
15512 IEM_MC_ADVANCE_RIP();
15513
15514 IEM_MC_END();
15515 return VINF_SUCCESS;
15516}
15517
15518
15519/** Opcode 0xdc !11/0. */
15520FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
15521{
15522 IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
15523 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
15524}
15525
15526
15527/** Opcode 0xdc !11/1. */
15528FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
15529{
15530 IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
15531 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
15532}
15533
15534
15535/** Opcode 0xdc !11/2. */
15536FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
15537{
15538 IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");
15539
15540 IEM_MC_BEGIN(3, 3);
15541 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15542 IEM_MC_LOCAL(uint16_t, u16Fsw);
15543 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
15544 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15545 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15546 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
15547
15548 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15550
15551 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15552 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15553 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15554
15555 IEM_MC_PREPARE_FPU_USAGE();
15556 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15557 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
15558 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15559 IEM_MC_ELSE()
15560 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15561 IEM_MC_ENDIF();
15562 IEM_MC_ADVANCE_RIP();
15563
15564 IEM_MC_END();
15565 return VINF_SUCCESS;
15566}
15567
15568
15569/** Opcode 0xdc !11/3. */
15570FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
15571{
15572 IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");
15573
15574 IEM_MC_BEGIN(3, 3);
15575 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15576 IEM_MC_LOCAL(uint16_t, u16Fsw);
15577 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
15578 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15579 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15580 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
15581
15582 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15584
15585 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15586 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15587 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15588
15589 IEM_MC_PREPARE_FPU_USAGE();
15590 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15591 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
15592 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15593 IEM_MC_ELSE()
15594 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15595 IEM_MC_ENDIF();
15596 IEM_MC_ADVANCE_RIP();
15597
15598 IEM_MC_END();
15599 return VINF_SUCCESS;
15600}
15601
15602
15603/** Opcode 0xdc !11/4. */
15604FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
15605{
15606 IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
15607 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
15608}
15609
15610
15611/** Opcode 0xdc !11/5. */
15612FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
15613{
15614 IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
15615 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
15616}
15617
15618
15619/** Opcode 0xdc !11/6. */
15620FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
15621{
15622 IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
15623 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
15624}
15625
15626
15627/** Opcode 0xdc !11/7. */
15628FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
15629{
15630 IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
15631 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
15632}
15633
15634
15635/** Opcode 0xdc. */
15636FNIEMOP_DEF(iemOp_EscF4)
15637{
15638 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15639 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
15640 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15641 {
15642 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15643 {
15644 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
15645 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
15646 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
15647 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
15648 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
15649 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
15650 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
15651 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
15652 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15653 }
15654 }
15655 else
15656 {
15657 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15658 {
15659 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
15660 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
15661 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
15662 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
15663 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
15664 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
15665 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
15666 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
15667 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15668 }
15669 }
15670}
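

/*
 * Note! Illustrative sketch added for exposition; it is not part of the build
 * and the iemSketch* name is made up. It shows how the escape-byte
 * dispatchers above slice the ModR/M byte: mod (bits 7:6) selects the
 * register form when it equals 3, reg (bits 5:3) selects the switch case,
 * and rm (bits 2:0) names ST(i) or feeds the effective address decoder.
 */
static void iemSketchSplitModRm(uint8_t bRm, uint8_t *pbMod, uint8_t *pbReg, uint8_t *pbRm)
{
    *pbMod = bRm >> 6;        /* 3 == register operand, i.e. the ST(i) forms. */
    *pbReg = (bRm >> 3) & 7;  /* Instruction selector within the escape byte. */
    *pbRm  = bRm & 7;         /* ST(i) index or memory addressing mode.       */
}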
15671
15672
15673/** Opcode 0xdd !11/0.
15674 * @sa iemOp_fld_m32r */
15675FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
15676{
15677 IEMOP_MNEMONIC(fld_m64r, "fld m64r");
15678
15679 IEM_MC_BEGIN(2, 3);
15680 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15681 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15682 IEM_MC_LOCAL(RTFLOAT64U, r64Val);
15683 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15684 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);
15685
15686 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15688 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15689 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15690
15691 IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15692 IEM_MC_PREPARE_FPU_USAGE();
15693 IEM_MC_IF_FPUREG_IS_EMPTY(7)
15694 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
15695 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15696 IEM_MC_ELSE()
15697 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15698 IEM_MC_ENDIF();
15699 IEM_MC_ADVANCE_RIP();
15700
15701 IEM_MC_END();
15702 return VINF_SUCCESS;
15703}
15704
15705
15706/** Opcode 0xdd !11/1. */
15707FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
15708{
15709 IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
15710 IEM_MC_BEGIN(3, 2);
15711 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15712 IEM_MC_LOCAL(uint16_t, u16Fsw);
15713 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15714 IEM_MC_ARG(int64_t *, pi64Dst, 1);
15715 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15716
15717 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15719 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15720 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15721
15722 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15723 IEM_MC_PREPARE_FPU_USAGE();
15724 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15725 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
15726 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
15727 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15728 IEM_MC_ELSE()
15729 IEM_MC_IF_FCW_IM()
15730 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
15731 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
15732 IEM_MC_ENDIF();
15733 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15734 IEM_MC_ENDIF();
15735 IEM_MC_ADVANCE_RIP();
15736
15737 IEM_MC_END();
15738 return VINF_SUCCESS;
15739}
15740
15741
15742/** Opcode 0xdd !11/2. */
15743FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
15744{
15745 IEMOP_MNEMONIC(fst_m64r, "fst m64r");
15746 IEM_MC_BEGIN(3, 2);
15747 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15748 IEM_MC_LOCAL(uint16_t, u16Fsw);
15749 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15750 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
15751 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15752
15753 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15755 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15756 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15757
15758 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15759 IEM_MC_PREPARE_FPU_USAGE();
15760 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15761 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
15762 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
15763 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15764 IEM_MC_ELSE()
15765 IEM_MC_IF_FCW_IM()
15766 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
15767 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
15768 IEM_MC_ENDIF();
15769 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15770 IEM_MC_ENDIF();
15771 IEM_MC_ADVANCE_RIP();
15772
15773 IEM_MC_END();
15774 return VINF_SUCCESS;
15775}
15776
15779
15780/** Opcode 0xdd !11/3. */
15781FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
15782{
15783 IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
15784 IEM_MC_BEGIN(3, 2);
15785 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15786 IEM_MC_LOCAL(uint16_t, u16Fsw);
15787 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15788 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
15789 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15790
15791 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15793 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15794 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15795
15796 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15797 IEM_MC_PREPARE_FPU_USAGE();
15798 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15799 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
15800 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
15801 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15802 IEM_MC_ELSE()
15803 IEM_MC_IF_FCW_IM()
15804 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
15805 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
15806 IEM_MC_ENDIF();
15807 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15808 IEM_MC_ENDIF();
15809 IEM_MC_ADVANCE_RIP();
15810
15811 IEM_MC_END();
15812 return VINF_SUCCESS;
15813}
15814
15815
15816/** Opcode 0xdd !11/4. */
15817FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
15818{
15819 IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
15820 IEM_MC_BEGIN(3, 0);
15821 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
15822 IEM_MC_ARG(uint8_t, iEffSeg, 1);
15823 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
15824 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15826 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15827 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15828 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
15829 IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
15830 IEM_MC_END();
15831 return VINF_SUCCESS;
15832}
15833
15834
15835/** Opcode 0xdd !11/6. */
15836FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
15837{
15838 IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
15839 IEM_MC_BEGIN(3, 0);
15840 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
15841 IEM_MC_ARG(uint8_t, iEffSeg, 1);
15842 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
15843 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15845 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15846 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
15847 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
15848 IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
15849 IEM_MC_END();
15850 return VINF_SUCCESS;
15852}

15853
15854/** Opcode 0xdd !11/7. */
15855FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
15856{
15857 IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");
15858
15859 IEM_MC_BEGIN(0, 2);
15860 IEM_MC_LOCAL(uint16_t, u16Tmp);
15861 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15862
15863 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15865 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15866
15867 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
15868 IEM_MC_FETCH_FSW(u16Tmp);
15869 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
15870 IEM_MC_ADVANCE_RIP();
15871
15872/** @todo Debug / drop a hint to the verifier that things may differ
15873 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
15874 * NT4SP1. (X86_FSW_PE) */
15875 IEM_MC_END();
15876 return VINF_SUCCESS;
15877}
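

/*
 * Note! Illustrative sketch added for exposition; it is not part of the build
 * and the iemSketch* name is made up. The 0x4020 vs 0x4000 discrepancy in
 * the todo above is bit 5 of the status word, the precision exception flag
 * (X86_FSW_PE), which gets set whenever a result had to be rounded.
 */
static int iemSketchFswPrecisionFlag(uint16_t u16Fsw)
{
    return (u16Fsw & 0x0020 /* X86_FSW_PE */) != 0;
}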
15878
15879
15880/** Opcode 0xdd 11/0. */
15881FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
15882{
15883 IEMOP_MNEMONIC(ffree_stN, "ffree stN");
15884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15885 /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
15886 unmodified. */
15887
15888 IEM_MC_BEGIN(0, 0);
15889
15890 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15891 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15892
15893 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15894 IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
15895 IEM_MC_UPDATE_FPU_OPCODE_IP();
15896
15897 IEM_MC_ADVANCE_RIP();
15898 IEM_MC_END();
15899 return VINF_SUCCESS;
15900}
15901
15902
15903/** Opcode 0xdd 11/1. */
15904FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
15905{
15906 IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
15907 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15908
15909 IEM_MC_BEGIN(0, 2);
15910 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
15911 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15912 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15913 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15914
15915 IEM_MC_PREPARE_FPU_USAGE();
15916 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15917 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
15918 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
15919 IEM_MC_ELSE()
15920 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
15921 IEM_MC_ENDIF();
15922
15923 IEM_MC_ADVANCE_RIP();
15924 IEM_MC_END();
15925 return VINF_SUCCESS;
15926}
15927
15928
15929/** Opcode 0xdd 11/4. */
15930FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
15931{
15932 IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
15933 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
15934}
15935
15936
15937/** Opcode 0xdd 11/5. */
15938FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
15939{
15940 IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
15941 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
15942}
15943
15944
15945/** Opcode 0xdd. */
15946FNIEMOP_DEF(iemOp_EscF5)
15947{
15948 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15949 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
15950 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15951 {
15952 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15953 {
15954 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
15955 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
15956 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
15957 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
15958 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0, bRm);
15959 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
15960 case 6: return IEMOP_RAISE_INVALID_OPCODE();
15961 case 7: return IEMOP_RAISE_INVALID_OPCODE();
15962 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15963 }
15964 }
15965 else
15966 {
15967 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15968 {
15969 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
15970 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
15971 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
15972 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
15973 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
15974 case 5: return IEMOP_RAISE_INVALID_OPCODE();
15975 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
15976 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
15977 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15978 }
15979 }
15980}
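

/*
 * Note! Illustrative sketch added for exposition; it is not part of the build
 * and the iemSketch* name is made up. The RT_MAKE_U16(bRm, 0xdd & 0x7)
 * statements above build the 11-bit x87 opcode (FOP) value: the low three
 * bits of the escape byte (0xd8..0xdf) become bits 10:8 and the ModR/M byte
 * bits 7:0, since RT_MAKE_U16 packs its first argument as the low byte.
 */
static uint16_t iemSketchFpuOpcode(uint8_t bEscape, uint8_t bRm)
{
    return (uint16_t)(((bEscape & 0x7) << 8) | bRm);
}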
15981
15982
15983/** Opcode 0xde 11/0. */
15984FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
15985{
15986 IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
15987 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
15988}
15989
15990
15991/** Opcode 0xde 11/1. */
15992FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
15993{
15994 IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
15995 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
15996}
15997
15998
15999/** Opcode 0xde 0xd9. */
16000FNIEMOP_DEF(iemOp_fcompp)
16001{
16002 IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
16003 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
16004}
16005
16006
16007/** Opcode 0xde 11/4. */
16008FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
16009{
16010 IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
16011 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
16012}
16013
16014
16015/** Opcode 0xde 11/5. */
16016FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
16017{
16018 IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
16019 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
16020}
16021
16022
16023/** Opcode 0xde 11/6. */
16024FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
16025{
16026 IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
16027 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
16028}
16029
16030
16031/** Opcode 0xde 11/7. */
16032FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
16033{
16034 IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
16035 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
16036}
16037
16038
16039/**
16040 * Common worker for FPU instructions working on ST0 and an m16i, and storing
16041 * the result in ST0.
16042 *
16043 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16044 */
16045FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
16046{
16047 IEM_MC_BEGIN(3, 3);
16048 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16049 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16050 IEM_MC_LOCAL(int16_t, i16Val2);
16051 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16052 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16053 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16054
16055 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16057
16058 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16059 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16060 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16061
16062 IEM_MC_PREPARE_FPU_USAGE();
16063 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16064 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
16065 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
16066 IEM_MC_ELSE()
16067 IEM_MC_FPU_STACK_UNDERFLOW(0);
16068 IEM_MC_ENDIF();
16069 IEM_MC_ADVANCE_RIP();
16070
16071 IEM_MC_END();
16072 return VINF_SUCCESS;
16073}
16074
16075
16076/** Opcode 0xde !11/0. */
16077FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
16078{
16079 IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
16080 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
16081}
16082
16083
16084/** Opcode 0xde !11/1. */
16085FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
16086{
16087 IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
16088 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
16089}
16090
16091
16092/** Opcode 0xde !11/2. */
16093FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
16094{
16095 IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");
16096
16097 IEM_MC_BEGIN(3, 3);
16098 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16099 IEM_MC_LOCAL(uint16_t, u16Fsw);
16100 IEM_MC_LOCAL(int16_t, i16Val2);
16101 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16102 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16103 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16104
16105 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16107
16108 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16109 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16110 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16111
16112 IEM_MC_PREPARE_FPU_USAGE();
16113 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16114 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
16115 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16116 IEM_MC_ELSE()
16117 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16118 IEM_MC_ENDIF();
16119 IEM_MC_ADVANCE_RIP();
16120
16121 IEM_MC_END();
16122 return VINF_SUCCESS;
16123}
16124
16125
16126/** Opcode 0xde !11/3. */
16127FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
16128{
16129 IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");
16130
16131 IEM_MC_BEGIN(3, 3);
16132 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16133 IEM_MC_LOCAL(uint16_t, u16Fsw);
16134 IEM_MC_LOCAL(int16_t, i16Val2);
16135 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16136 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16137 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16138
16139 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16141
16142 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16143 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16144 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16145
16146 IEM_MC_PREPARE_FPU_USAGE();
16147 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16148 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
16149 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16150 IEM_MC_ELSE()
16151 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16152 IEM_MC_ENDIF();
16153 IEM_MC_ADVANCE_RIP();
16154
16155 IEM_MC_END();
16156 return VINF_SUCCESS;
16157}
16158
16159
16160/** Opcode 0xde !11/4. */
16161FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
16162{
16163 IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
16164 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
16165}
16166
16167
16168/** Opcode 0xde !11/5. */
16169FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
16170{
16171 IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
16172 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
16173}
16174
16175
16176/** Opcode 0xde !11/6. */
16177FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
16178{
16179 IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
16180 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
16181}
16182
16183
16184/** Opcode 0xde !11/7. */
16185FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
16186{
16187 IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
16188 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
16189}
16190
16191
16192/** Opcode 0xde. */
16193FNIEMOP_DEF(iemOp_EscF6)
16194{
16195 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16196 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
16197 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16198 {
16199 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16200 {
16201 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
16202 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
16203 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
16204 case 3: if (bRm == 0xd9)
16205 return FNIEMOP_CALL(iemOp_fcompp);
16206 return IEMOP_RAISE_INVALID_OPCODE();
16207 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
16208 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
16209 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
16210 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
16211 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16212 }
16213 }
16214 else
16215 {
16216 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16217 {
16218 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
16219 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
16220 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
16221 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
16222 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
16223 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
16224 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
16225 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
16226 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16227 }
16228 }
16229}
16230
16231
16232/** Opcode 0xdf 11/0.
16233 * Undocumented instruction, assumed to work like ffree + fincstp. */
16234FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
16235{
16236 IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
16237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16238
16239 IEM_MC_BEGIN(0, 0);
16240
16241 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16242 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16243
16244 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
16245 IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
16246 IEM_MC_FPU_STACK_INC_TOP();
16247 IEM_MC_UPDATE_FPU_OPCODE_IP();
16248
16249 IEM_MC_ADVANCE_RIP();
16250 IEM_MC_END();
16251 return VINF_SUCCESS;
16252}
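

/*
 * Note! Illustrative sketch added for exposition; it is not part of the build
 * and the iemSketch* name is made up. It spells out the net FFREEP effect
 * of the IEM_MC_FPU_STACK_FREE + IEM_MC_FPU_STACK_INC_TOP pair above,
 * assuming the FSAVE-format tag word (two bits per register, 11b = empty)
 * and TOP in FSW bits 13:11.
 */
static void iemSketchFFreeP(uint16_t *pu16Fsw, uint16_t *pu16Ftw, uint8_t iStReg)
{
    unsigned const iTop = (*pu16Fsw >> 11) & 7;
    unsigned const iReg = (iTop + iStReg) & 7;   /* ST(i) -> physical register. */
    *pu16Ftw |= (uint16_t)(3 << (iReg * 2));     /* Mark it empty.              */
    *pu16Fsw  = (uint16_t)((*pu16Fsw & ~(7 << 11)) | (((iTop + 1) & 7) << 11)); /* Pop: TOP += 1 (mod 8). */
}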
16253
16254
16255/** Opcode 0xdf 0xe0. */
16256FNIEMOP_DEF(iemOp_fnstsw_ax)
16257{
16258 IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
16259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16260
16261 IEM_MC_BEGIN(0, 1);
16262 IEM_MC_LOCAL(uint16_t, u16Tmp);
16263 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16264 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
16265 IEM_MC_FETCH_FSW(u16Tmp);
16266 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
16267 IEM_MC_ADVANCE_RIP();
16268 IEM_MC_END();
16269 return VINF_SUCCESS;
16270}
16271
16272
16273/** Opcode 0xdf 11/5. */
16274FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
16275{
16276 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
16277 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
16278}
16279
16280
16281/** Opcode 0xdf 11/6. */
16282FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
16283{
16284 IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
16285 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
16286}
16287
16288
16289/** Opcode 0xdf !11/0. */
16290FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
16291{
16292 IEMOP_MNEMONIC(fild_m16i, "fild m16i");
16293
16294 IEM_MC_BEGIN(2, 3);
16295 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16296 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16297 IEM_MC_LOCAL(int16_t, i16Val);
16298 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16299 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);
16300
16301 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16303
16304 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16305 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16306 IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16307
16308 IEM_MC_PREPARE_FPU_USAGE();
16309 IEM_MC_IF_FPUREG_IS_EMPTY(7)
16310 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
16311 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16312 IEM_MC_ELSE()
16313 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16314 IEM_MC_ENDIF();
16315 IEM_MC_ADVANCE_RIP();
16316
16317 IEM_MC_END();
16318 return VINF_SUCCESS;
16319}
16320
16321
16322/** Opcode 0xdf !11/1. */
16323FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
16324{
16325 IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
16326 IEM_MC_BEGIN(3, 2);
16327 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16328 IEM_MC_LOCAL(uint16_t, u16Fsw);
16329 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16330 IEM_MC_ARG(int16_t *, pi16Dst, 1);
16331 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16332
16333 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16335 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16336 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16337
16338 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16339 IEM_MC_PREPARE_FPU_USAGE();
16340 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16341 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
16342 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
16343 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16344 IEM_MC_ELSE()
16345 IEM_MC_IF_FCW_IM()
16346 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
16347 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
16348 IEM_MC_ENDIF();
16349 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16350 IEM_MC_ENDIF();
16351 IEM_MC_ADVANCE_RIP();
16352
16353 IEM_MC_END();
16354 return VINF_SUCCESS;
16355}
16356
16357
16358/** Opcode 0xdf !11/2. */
16359FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
16360{
16361 IEMOP_MNEMONIC(fist_m16i, "fist m16i");
16362 IEM_MC_BEGIN(3, 2);
16363 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16364 IEM_MC_LOCAL(uint16_t, u16Fsw);
16365 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16366 IEM_MC_ARG(int16_t *, pi16Dst, 1);
16367 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16368
16369 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16371 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16372 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16373
16374 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16375 IEM_MC_PREPARE_FPU_USAGE();
16376 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16377 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
16378 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
16379 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16380 IEM_MC_ELSE()
16381 IEM_MC_IF_FCW_IM()
16382 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
16383 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
16384 IEM_MC_ENDIF();
16385 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16386 IEM_MC_ENDIF();
16387 IEM_MC_ADVANCE_RIP();
16388
16389 IEM_MC_END();
16390 return VINF_SUCCESS;
16391}
16392
16393
16394/** Opcode 0xdf !11/3. */
16395FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
16396{
16397 IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
16398 IEM_MC_BEGIN(3, 2);
16399 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16400 IEM_MC_LOCAL(uint16_t, u16Fsw);
16401 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16402 IEM_MC_ARG(int16_t *, pi16Dst, 1);
16403 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16404
16405 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16407 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16408 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16409
16410 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16411 IEM_MC_PREPARE_FPU_USAGE();
16412 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16413 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
16414 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
16415 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16416 IEM_MC_ELSE()
16417 IEM_MC_IF_FCW_IM()
16418 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
16419 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
16420 IEM_MC_ENDIF();
16421 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16422 IEM_MC_ENDIF();
16423 IEM_MC_ADVANCE_RIP();
16424
16425 IEM_MC_END();
16426 return VINF_SUCCESS;
16427}
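

/*
 * Note! Illustrative sketch added for exposition; it is not part of the build
 * and the iemSketch* name is made up. The INT16_MIN/INT64_MIN constants
 * stored by the FCW.IM-masked underflow paths above are the "integer
 * indefinite" values, i.e. the most negative integer of the destination
 * width:
 */
static int64_t iemSketchIntegerIndefinite(unsigned cBits)
{
    return (int64_t)(UINT64_MAX << (cBits - 1)); /* 16 -> INT16_MIN, 32 -> INT32_MIN, 64 -> INT64_MIN. */
}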
16428
16429
16430/** Opcode 0xdf !11/4. */
16431FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
16432
16433
16434/** Opcode 0xdf !11/5. */
16435FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
16436{
16437 IEMOP_MNEMONIC(fild_m64i, "fild m64i");
16438
16439 IEM_MC_BEGIN(2, 3);
16440 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16441 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16442 IEM_MC_LOCAL(int64_t, i64Val);
16443 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16444 IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);
16445
16446 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16448
16449 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16450 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16451 IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16452
16453 IEM_MC_PREPARE_FPU_USAGE();
16454 IEM_MC_IF_FPUREG_IS_EMPTY(7)
16455 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
16456 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16457 IEM_MC_ELSE()
16458 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16459 IEM_MC_ENDIF();
16460 IEM_MC_ADVANCE_RIP();
16461
16462 IEM_MC_END();
16463 return VINF_SUCCESS;
16464}
16465
16466
16467/** Opcode 0xdf !11/6. */
16468FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
16469
16470
16471/** Opcode 0xdf !11/7. */
16472FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
16473{
16474 IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
16475 IEM_MC_BEGIN(3, 2);
16476 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16477 IEM_MC_LOCAL(uint16_t, u16Fsw);
16478 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16479 IEM_MC_ARG(int64_t *, pi64Dst, 1);
16480 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16481
16482 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16484 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16485 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16486
16487 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16488 IEM_MC_PREPARE_FPU_USAGE();
16489 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16490 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
16491 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
16492 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16493 IEM_MC_ELSE()
16494 IEM_MC_IF_FCW_IM()
16495 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
16496 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
16497 IEM_MC_ENDIF();
16498 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16499 IEM_MC_ENDIF();
16500 IEM_MC_ADVANCE_RIP();
16501
16502 IEM_MC_END();
16503 return VINF_SUCCESS;
16504}
16505
16506
16507/** Opcode 0xdf. */
16508FNIEMOP_DEF(iemOp_EscF7)
16509{
16510 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7); /* Record FOP like the other escape bytes above do. */
16511 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16512 {
16513 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16514 {
16515 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
16516 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
16517 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
16518 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
16519 case 4: if (bRm == 0xe0)
16520 return FNIEMOP_CALL(iemOp_fnstsw_ax);
16521 return IEMOP_RAISE_INVALID_OPCODE();
16522 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
16523 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
16524 case 7: return IEMOP_RAISE_INVALID_OPCODE();
16525 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16526 }
16527 }
16528 else
16529 {
16530 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16531 {
16532 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
16533 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
16534 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
16535 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
16536 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
16537 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
16538 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
16539 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
16540 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16541 }
16542 }
16543}
16544
16545
16546/** Opcode 0xe0. */
16547FNIEMOP_DEF(iemOp_loopne_Jb)
16548{
16549 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
16550 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
16551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16552 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16553
16554 switch (pVCpu->iem.s.enmEffAddrMode)
16555 {
16556 case IEMMODE_16BIT:
16557 IEM_MC_BEGIN(0,0);
16558 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
16559 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
16560 IEM_MC_REL_JMP_S8(i8Imm);
16561 } IEM_MC_ELSE() {
16562 IEM_MC_ADVANCE_RIP();
16563 } IEM_MC_ENDIF();
16564 IEM_MC_END();
16565 return VINF_SUCCESS;
16566
16567 case IEMMODE_32BIT:
16568 IEM_MC_BEGIN(0,0);
16569 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
16570 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
16571 IEM_MC_REL_JMP_S8(i8Imm);
16572 } IEM_MC_ELSE() {
16573 IEM_MC_ADVANCE_RIP();
16574 } IEM_MC_ENDIF();
16575 IEM_MC_END();
16576 return VINF_SUCCESS;
16577
16578 case IEMMODE_64BIT:
16579 IEM_MC_BEGIN(0,0);
16580 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
16581 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
16582 IEM_MC_REL_JMP_S8(i8Imm);
16583 } IEM_MC_ELSE() {
16584 IEM_MC_ADVANCE_RIP();
16585 } IEM_MC_ENDIF();
16586 IEM_MC_END();
16587 return VINF_SUCCESS;
16588
16589 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16590 }
16591}
16592
16593
16594/** Opcode 0xe1. */
16595FNIEMOP_DEF(iemOp_loope_Jb)
16596{
16597 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
16598 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
16599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16600 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16601
16602 switch (pVCpu->iem.s.enmEffAddrMode)
16603 {
16604 case IEMMODE_16BIT:
16605 IEM_MC_BEGIN(0,0);
16606 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
16607 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
16608 IEM_MC_REL_JMP_S8(i8Imm);
16609 } IEM_MC_ELSE() {
16610 IEM_MC_ADVANCE_RIP();
16611 } IEM_MC_ENDIF();
16612 IEM_MC_END();
16613 return VINF_SUCCESS;
16614
16615 case IEMMODE_32BIT:
16616 IEM_MC_BEGIN(0,0);
16617 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
16618 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
16619 IEM_MC_REL_JMP_S8(i8Imm);
16620 } IEM_MC_ELSE() {
16621 IEM_MC_ADVANCE_RIP();
16622 } IEM_MC_ENDIF();
16623 IEM_MC_END();
16624 return VINF_SUCCESS;
16625
16626 case IEMMODE_64BIT:
16627 IEM_MC_BEGIN(0,0);
16628 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
16629 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
16630 IEM_MC_REL_JMP_S8(i8Imm);
16631 } IEM_MC_ELSE() {
16632 IEM_MC_ADVANCE_RIP();
16633 } IEM_MC_ENDIF();
16634 IEM_MC_END();
16635 return VINF_SUCCESS;
16636
16637 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16638 }
16639}
16640
16641
16642/** Opcode 0xe2. */
16643FNIEMOP_DEF(iemOp_loop_Jb)
16644{
16645 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
16646 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
16647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16648 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16649
16650 /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
16651 * using the 32-bit operand size override. How can that be restarted? See
16652 * weird pseudo code in intel manual. */
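    /* A branch-to-self (i8Imm == minus the instruction length) would spin for
       up to CX/ECX/RCX iterations without doing anything else, so each case
       below detects it and simply clears the counter instead. */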
16653 switch (pVCpu->iem.s.enmEffAddrMode)
16654 {
16655 case IEMMODE_16BIT:
16656 IEM_MC_BEGIN(0,0);
16657 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
16658 {
16659 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
16660 IEM_MC_IF_CX_IS_NZ() {
16661 IEM_MC_REL_JMP_S8(i8Imm);
16662 } IEM_MC_ELSE() {
16663 IEM_MC_ADVANCE_RIP();
16664 } IEM_MC_ENDIF();
16665 }
16666 else
16667 {
16668 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
16669 IEM_MC_ADVANCE_RIP();
16670 }
16671 IEM_MC_END();
16672 return VINF_SUCCESS;
16673
16674 case IEMMODE_32BIT:
16675 IEM_MC_BEGIN(0,0);
16676 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
16677 {
16678 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
16679 IEM_MC_IF_ECX_IS_NZ() {
16680 IEM_MC_REL_JMP_S8(i8Imm);
16681 } IEM_MC_ELSE() {
16682 IEM_MC_ADVANCE_RIP();
16683 } IEM_MC_ENDIF();
16684 }
16685 else
16686 {
16687 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
16688 IEM_MC_ADVANCE_RIP();
16689 }
16690 IEM_MC_END();
16691 return VINF_SUCCESS;
16692
16693 case IEMMODE_64BIT:
16694 IEM_MC_BEGIN(0,0);
16695 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
16696 {
16697 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
16698 IEM_MC_IF_RCX_IS_NZ() {
16699 IEM_MC_REL_JMP_S8(i8Imm);
16700 } IEM_MC_ELSE() {
16701 IEM_MC_ADVANCE_RIP();
16702 } IEM_MC_ENDIF();
16703 }
16704 else
16705 {
16706 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
16707 IEM_MC_ADVANCE_RIP();
16708 }
16709 IEM_MC_END();
16710 return VINF_SUCCESS;
16711
16712 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16713 }
16714}
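

/*
 * Note! Illustrative sketch added for exposition; it is not part of the build
 * and the iemSketch* name is made up. 16-bit LOOP semantics as implemented
 * above: decrement CX (no flags touched) and branch while it is non-zero,
 * with the branch-to-self shortcut folded in. cbInstr is 2 for a plain
 * 'loop Jb' without prefixes.
 */
static uint16_t iemSketchLoop16(uint16_t u16Cx, int8_t i8Disp, uint8_t cbInstr, int *pfTaken)
{
    if (i8Disp == -(int8_t)cbInstr)     /* 'loop $': just exhaust the counter. */
    {
        *pfTaken = 0;
        return 0;
    }
    u16Cx -= 1;
    *pfTaken = u16Cx != 0;              /* Taken while the counter is non-zero. */
    return u16Cx;
}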
16715
16716
16717/** Opcode 0xe3. */
16718FNIEMOP_DEF(iemOp_jecxz_Jb)
16719{
16720 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
16721 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
16722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16723 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16724
16725 switch (pVCpu->iem.s.enmEffAddrMode)
16726 {
16727 case IEMMODE_16BIT:
16728 IEM_MC_BEGIN(0,0);
16729 IEM_MC_IF_CX_IS_NZ() {
16730 IEM_MC_ADVANCE_RIP();
16731 } IEM_MC_ELSE() {
16732 IEM_MC_REL_JMP_S8(i8Imm);
16733 } IEM_MC_ENDIF();
16734 IEM_MC_END();
16735 return VINF_SUCCESS;
16736
16737 case IEMMODE_32BIT:
16738 IEM_MC_BEGIN(0,0);
16739 IEM_MC_IF_ECX_IS_NZ() {
16740 IEM_MC_ADVANCE_RIP();
16741 } IEM_MC_ELSE() {
16742 IEM_MC_REL_JMP_S8(i8Imm);
16743 } IEM_MC_ENDIF();
16744 IEM_MC_END();
16745 return VINF_SUCCESS;
16746
16747 case IEMMODE_64BIT:
16748 IEM_MC_BEGIN(0,0);
16749 IEM_MC_IF_RCX_IS_NZ() {
16750 IEM_MC_ADVANCE_RIP();
16751 } IEM_MC_ELSE() {
16752 IEM_MC_REL_JMP_S8(i8Imm);
16753 } IEM_MC_ENDIF();
16754 IEM_MC_END();
16755 return VINF_SUCCESS;
16756
16757 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16758 }
16759}
16760
16761
16762/** Opcode 0xe4 */
16763FNIEMOP_DEF(iemOp_in_AL_Ib)
16764{
16765 IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
16766 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16768 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
16769}
16770
16771
16772/** Opcode 0xe5 */
16773FNIEMOP_DEF(iemOp_in_eAX_Ib)
16774{
16775 IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
16776 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16778 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
16779}
16780
16781
16782/** Opcode 0xe6 */
16783FNIEMOP_DEF(iemOp_out_Ib_AL)
16784{
16785 IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
16786 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16788 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
16789}
16790
16791
16792/** Opcode 0xe7 */
16793FNIEMOP_DEF(iemOp_out_Ib_eAX)
16794{
16795 IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
16796 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16798 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
16799}
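

/*
 * Note! Illustrative sketch added for exposition; it is not part of the build
 * and the iemSketch* name is made up. The width argument passed to
 * iemCImpl_in/iemCImpl_out above is the access size in bytes: 1 for the AL
 * forms, otherwise 2 or 4 by effective operand size (there is no 8-byte
 * port I/O, so a 64-bit operand size also uses 4).
 */
static uint8_t iemSketchIoAccessWidth(int fByteForm, int fOpSize16)
{
    return fByteForm ? 1 : fOpSize16 ? 2 : 4;
}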
16800
16801
16802/** Opcode 0xe8. */
16803FNIEMOP_DEF(iemOp_call_Jv)
16804{
16805 IEMOP_MNEMONIC(call_Jv, "call Jv");
16806 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16807 switch (pVCpu->iem.s.enmEffOpSize)
16808 {
16809 case IEMMODE_16BIT:
16810 {
16811 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
16812 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
16813 }
16814
16815 case IEMMODE_32BIT:
16816 {
16817 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
16818 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
16819 }
16820
16821 case IEMMODE_64BIT:
16822 {
16823 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
16824 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
16825 }
16826
16827 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16828 }
16829}
16830
16831
16832/** Opcode 0xe9. */
16833FNIEMOP_DEF(iemOp_jmp_Jv)
16834{
16835 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
16836 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16837 switch (pVCpu->iem.s.enmEffOpSize)
16838 {
16839 case IEMMODE_16BIT:
16840 {
16841 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
16842 IEM_MC_BEGIN(0, 0);
16843 IEM_MC_REL_JMP_S16(i16Imm);
16844 IEM_MC_END();
16845 return VINF_SUCCESS;
16846 }
16847
16848 case IEMMODE_64BIT:
16849 case IEMMODE_32BIT:
16850 {
16851 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
16852 IEM_MC_BEGIN(0, 0);
16853 IEM_MC_REL_JMP_S32(i32Imm);
16854 IEM_MC_END();
16855 return VINF_SUCCESS;
16856 }
16857
16858 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16859 }
16860}
16861
16862
16863/** Opcode 0xea. */
16864FNIEMOP_DEF(iemOp_jmp_Ap)
16865{
16866 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
16867 IEMOP_HLP_NO_64BIT();
16868
16869 /* Decode the far pointer address and pass it on to the far jump C implementation. */
16870 uint32_t offSeg;
16871 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
16872 IEM_OPCODE_GET_NEXT_U32(&offSeg);
16873 else
16874 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
16875 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
16876 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16877 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
16878}
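

/*
 * Note! Illustrative sketch added for exposition; it is not part of the build
 * and the iemSketch* name is made up. The ptr16:16 / ptr16:32 immediate
 * decoded above is laid out little-endian with the offset first and the
 * 16-bit selector last:
 */
static void iemSketchDecodeFarPtr(uint8_t const *pb, int fOpSize32, uint32_t *poffSeg, uint16_t *puSel)
{
    if (fOpSize32)
    {
        *poffSeg = (uint32_t)pb[0] | ((uint32_t)pb[1] << 8) | ((uint32_t)pb[2] << 16) | ((uint32_t)pb[3] << 24);
        *puSel   = (uint16_t)(pb[4] | ((uint32_t)pb[5] << 8));
    }
    else
    {
        *poffSeg = (uint32_t)(pb[0] | ((uint32_t)pb[1] << 8));
        *puSel   = (uint16_t)(pb[2] | ((uint32_t)pb[3] << 8));
    }
}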
16879
16880
16881/** Opcode 0xeb. */
16882FNIEMOP_DEF(iemOp_jmp_Jb)
16883{
16884 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
16885 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
16886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16887 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16888
16889 IEM_MC_BEGIN(0, 0);
16890 IEM_MC_REL_JMP_S8(i8Imm);
16891 IEM_MC_END();
16892 return VINF_SUCCESS;
16893}
16894
16895
16896/** Opcode 0xec */
16897FNIEMOP_DEF(iemOp_in_AL_DX)
16898{
16899 IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
16900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16901 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
16902}
16903
16904
16905/** Opcode 0xed */
16906FNIEMOP_DEF(iemOp_eAX_DX)
16907{
16908 IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
16909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16910 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
16911}
16912
16913
16914/** Opcode 0xee */
16915FNIEMOP_DEF(iemOp_out_DX_AL)
16916{
16917 IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
16918 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16919 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
16920}
16921
16922
16923/** Opcode 0xef */
16924FNIEMOP_DEF(iemOp_out_DX_eAX)
16925{
16926 IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
16927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16928 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
16929}
16930
16931
16932/** Opcode 0xf0. */
16933FNIEMOP_DEF(iemOp_lock)
16934{
16935 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
16936 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
16937
16938 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
16939 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
16940}
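

/*
 * Note! Illustrative sketch added for exposition; it is not part of the build
 * and the iemSketch* name is made up. Prefix bytes like LOCK above and
 * REPNE/REPE below do no work of their own: each records a flag and
 * re-dispatches on the next opcode byte (REX handling and the 15-byte
 * instruction length limit are left out here). The numeric flags stand in
 * for IEM_OP_PRF_LOCK/REPNZ/REPZ.
 */
static uint8_t iemSketchSkipF0F2F3(uint8_t const *pbInstr, uint32_t *pfPrefixes)
{
    unsigned off = 0;
    for (;;)
        switch (pbInstr[off++])
        {
            case 0xf0: *pfPrefixes |= 1;                      break; /* lock                 */
            case 0xf2: *pfPrefixes = (*pfPrefixes & ~4U) | 2; break; /* repne overrides repe */
            case 0xf3: *pfPrefixes = (*pfPrefixes & ~2U) | 4; break; /* repe overrides repne */
            default:   return pbInstr[off - 1];                      /* the actual opcode    */
        }
}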
16941
16942
16943/** Opcode 0xf1. */
16944FNIEMOP_DEF(iemOp_int_1)
16945{
16946 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
16947 IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
16948 /** @todo testcase! */
16949 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
16950}
16951
16952
16953/** Opcode 0xf2. */
16954FNIEMOP_DEF(iemOp_repne)
16955{
16956 /* This overrides any previous REPE prefix. */
16957 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
16958 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
16959 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
16960
16961 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
16962 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
16963}
16964
16965
16966/** Opcode 0xf3. */
16967FNIEMOP_DEF(iemOp_repe)
16968{
16969 /* This overrides any previous REPNE prefix. */
16970 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
16971 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
16972 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
16973
16974 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
16975 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
16976}
16977
16978
16979/** Opcode 0xf4. */
16980FNIEMOP_DEF(iemOp_hlt)
16981{
16982 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16983 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
16984}
16985
16986
16987/** Opcode 0xf5. */
16988FNIEMOP_DEF(iemOp_cmc)
16989{
16990 IEMOP_MNEMONIC(cmc, "cmc");
16991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16992 IEM_MC_BEGIN(0, 0);
16993 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
16994 IEM_MC_ADVANCE_RIP();
16995 IEM_MC_END();
16996 return VINF_SUCCESS;
16997}
16998
16999
17000/**
17001 * Common implementation of 'inc/dec/not/neg Eb'.
17002 *
17003 * @param bRm The RM byte.
17004 * @param pImpl The instruction implementation.
17005 */
17006FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
17007{
17008 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17009 {
17010 /* register access */
17011 IEM_MC_BEGIN(2, 0);
17012 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17013 IEM_MC_ARG(uint32_t *, pEFlags, 1);
17014 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17015 IEM_MC_REF_EFLAGS(pEFlags);
17016 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
17017 IEM_MC_ADVANCE_RIP();
17018 IEM_MC_END();
17019 }
17020 else
17021 {
17022 /* memory access. */
17023 IEM_MC_BEGIN(2, 2);
17024 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17025 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17026 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17027
17028 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17029 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17030 IEM_MC_FETCH_EFLAGS(EFlags);
17031 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17032 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
17033 else
17034 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
17035
17036 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
17037 IEM_MC_COMMIT_EFLAGS(EFlags);
17038 IEM_MC_ADVANCE_RIP();
17039 IEM_MC_END();
17040 }
17041 return VINF_SUCCESS;
17042}
17043
17044
17045/**
17046 * Common implementation of 'inc/dec/not/neg Ev'.
17047 *
17048 * @param bRm The RM byte.
17049 * @param pImpl The instruction implementation.
17050 */
17051FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
17052{
17053 /* Registers are handled by a common worker. */
17054 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17055 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17056
17057 /* Memory we do here. */
17058 switch (pVCpu->iem.s.enmEffOpSize)
17059 {
17060 case IEMMODE_16BIT:
17061 IEM_MC_BEGIN(2, 2);
17062 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
17063 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17064 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17065
17066 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17067 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17068 IEM_MC_FETCH_EFLAGS(EFlags);
17069 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17070 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
17071 else
17072 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
17073
17074 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
17075 IEM_MC_COMMIT_EFLAGS(EFlags);
17076 IEM_MC_ADVANCE_RIP();
17077 IEM_MC_END();
17078 return VINF_SUCCESS;
17079
17080 case IEMMODE_32BIT:
17081 IEM_MC_BEGIN(2, 2);
17082 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
17083 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17084 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17085
17086 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17087 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17088 IEM_MC_FETCH_EFLAGS(EFlags);
17089 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17090 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
17091 else
17092 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
17093
17094 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
17095 IEM_MC_COMMIT_EFLAGS(EFlags);
17096 IEM_MC_ADVANCE_RIP();
17097 IEM_MC_END();
17098 return VINF_SUCCESS;
17099
17100 case IEMMODE_64BIT:
17101 IEM_MC_BEGIN(2, 2);
17102 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
17103 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17104 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17105
17106 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17107 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17108 IEM_MC_FETCH_EFLAGS(EFlags);
17109 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17110 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
17111 else
17112 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
17113
17114 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
17115 IEM_MC_COMMIT_EFLAGS(EFlags);
17116 IEM_MC_ADVANCE_RIP();
17117 IEM_MC_END();
17118 return VINF_SUCCESS;
17119
17120 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17121 }
17122}
17123
17124
17125/** Opcode 0xf6 /0. */
17126FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
17127{
17128 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
17129 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
17130
17131 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17132 {
17133 /* register access */
17134 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17136
17137 IEM_MC_BEGIN(3, 0);
17138 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17139 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
17140 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17141 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17142 IEM_MC_REF_EFLAGS(pEFlags);
17143 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
17144 IEM_MC_ADVANCE_RIP();
17145 IEM_MC_END();
17146 }
17147 else
17148 {
17149 /* memory access. */
17150 IEM_MC_BEGIN(3, 2);
17151 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17152 IEM_MC_ARG(uint8_t, u8Src, 1);
17153 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
17154 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17155
17156 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
17157 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17158 IEM_MC_ASSIGN(u8Src, u8Imm);
17159 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17160 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17161 IEM_MC_FETCH_EFLAGS(EFlags);
17162 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
17163
17164 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
17165 IEM_MC_COMMIT_EFLAGS(EFlags);
17166 IEM_MC_ADVANCE_RIP();
17167 IEM_MC_END();
17168 }
17169 return VINF_SUCCESS;
17170}
17171
17172
17173/** Opcode 0xf7 /0. */
17174FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
17175{
17176 IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
17177 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
17178
17179 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17180 {
17181 /* register access */
17182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17183 switch (pVCpu->iem.s.enmEffOpSize)
17184 {
17185 case IEMMODE_16BIT:
17186 {
17187 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
17188 IEM_MC_BEGIN(3, 0);
17189 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
17190 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
17191 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17192 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17193 IEM_MC_REF_EFLAGS(pEFlags);
17194 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
17195 IEM_MC_ADVANCE_RIP();
17196 IEM_MC_END();
17197 return VINF_SUCCESS;
17198 }
17199
17200 case IEMMODE_32BIT:
17201 {
17202 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
17203 IEM_MC_BEGIN(3, 0);
17204 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
17205 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
17206 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17207 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17208 IEM_MC_REF_EFLAGS(pEFlags);
17209 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
17210 /* No clearing the high dword here - test doesn't write back the result. */
17211 IEM_MC_ADVANCE_RIP();
17212 IEM_MC_END();
17213 return VINF_SUCCESS;
17214 }
17215
17216 case IEMMODE_64BIT:
17217 {
17218 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
      IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17219 IEM_MC_BEGIN(3, 0);
17220 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
17221 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
17222 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17223 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17224 IEM_MC_REF_EFLAGS(pEFlags);
17225 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
17226 IEM_MC_ADVANCE_RIP();
17227 IEM_MC_END();
17228 return VINF_SUCCESS;
17229 }
17230
17231 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17232 }
17233 }
17234 else
17235 {
17236 /* memory access. */
17237 switch (pVCpu->iem.s.enmEffOpSize)
17238 {
17239 case IEMMODE_16BIT:
17240 {
17241 IEM_MC_BEGIN(3, 2);
17242 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
17243 IEM_MC_ARG(uint16_t, u16Src, 1);
17244 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
17245 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17246
17247 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
17248 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
17249 IEM_MC_ASSIGN(u16Src, u16Imm);
17250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17251 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17252 IEM_MC_FETCH_EFLAGS(EFlags);
17253 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
17254
17255 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
17256 IEM_MC_COMMIT_EFLAGS(EFlags);
17257 IEM_MC_ADVANCE_RIP();
17258 IEM_MC_END();
17259 return VINF_SUCCESS;
17260 }
17261
17262 case IEMMODE_32BIT:
17263 {
17264 IEM_MC_BEGIN(3, 2);
17265 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
17266 IEM_MC_ARG(uint32_t, u32Src, 1);
17267 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
17268 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17269
17270 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
17271 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
17272 IEM_MC_ASSIGN(u32Src, u32Imm);
17273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17274 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17275 IEM_MC_FETCH_EFLAGS(EFlags);
17276 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
17277
17278 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
17279 IEM_MC_COMMIT_EFLAGS(EFlags);
17280 IEM_MC_ADVANCE_RIP();
17281 IEM_MC_END();
17282 return VINF_SUCCESS;
17283 }
17284
17285 case IEMMODE_64BIT:
17286 {
17287 IEM_MC_BEGIN(3, 2);
17288 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
17289 IEM_MC_ARG(uint64_t, u64Src, 1);
17290 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
17291 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17292
17293 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
17294 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
17295 IEM_MC_ASSIGN(u64Src, u64Imm);
17296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17297 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17298 IEM_MC_FETCH_EFLAGS(EFlags);
17299 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
17300
17301 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
17302 IEM_MC_COMMIT_EFLAGS(EFlags);
17303 IEM_MC_ADVANCE_RIP();
17304 IEM_MC_END();
17305 return VINF_SUCCESS;
17306 }
17307
17308 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17309 }
17310 }
17311}
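
/*
 * Note how the Iv immediate tracks the effective operand size above: 2 bytes,
 * 4 bytes, or 4 bytes sign-extended to 64 bits (which is what
 * IEM_OPCODE_GET_NEXT_S32_SX_U64 expresses; there is no 8-byte form of
 * test Ev,Iv).  A toy little-endian reader making the same choice, with
 * illustrative names:
 */
#if 0 /* illustrative sketch, not part of the build */
# include <stdint.h>
# include <string.h>

static uint64_t sketch_read_Iv(const uint8_t *pbImm, unsigned cbOpSize)
{
    if (cbOpSize == 2)
    {
        uint16_t u16; memcpy(&u16, pbImm, sizeof(u16));
        return u16;
    }
    uint32_t u32; memcpy(&u32, pbImm, sizeof(u32));  /* 4 bytes in both remaining cases */
    return cbOpSize == 4 ? u32 : (uint64_t)(int64_t)(int32_t)u32;
}
#endif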
17312
17313
17314/** Opcode 0xf6 /4, /5, /6 and /7. */
17315FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
17316{
17317 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17318 {
17319 /* register access */
17320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17321 IEM_MC_BEGIN(3, 1);
17322 IEM_MC_ARG(uint16_t *, pu16AX, 0);
17323 IEM_MC_ARG(uint8_t, u8Value, 1);
17324 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17325 IEM_MC_LOCAL(int32_t, rc);
17326
17327 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17328 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
17329 IEM_MC_REF_EFLAGS(pEFlags);
17330 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
17331 IEM_MC_IF_LOCAL_IS_Z(rc) {
17332 IEM_MC_ADVANCE_RIP();
17333 } IEM_MC_ELSE() {
17334 IEM_MC_RAISE_DIVIDE_ERROR();
17335 } IEM_MC_ENDIF();
17336
17337 IEM_MC_END();
17338 }
17339 else
17340 {
17341 /* memory access. */
17342 IEM_MC_BEGIN(3, 2);
17343 IEM_MC_ARG(uint16_t *, pu16AX, 0);
17344 IEM_MC_ARG(uint8_t, u8Value, 1);
17345 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17346 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17347 IEM_MC_LOCAL(int32_t, rc);
17348
17349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17351 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17352 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
17353 IEM_MC_REF_EFLAGS(pEFlags);
17354 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
17355 IEM_MC_IF_LOCAL_IS_Z(rc) {
17356 IEM_MC_ADVANCE_RIP();
17357 } IEM_MC_ELSE() {
17358 IEM_MC_RAISE_DIVIDE_ERROR();
17359 } IEM_MC_ENDIF();
17360
17361 IEM_MC_END();
17362 }
17363 return VINF_SUCCESS;
17364}
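
/*
 * Note: the pfnU8 workers above return zero for success and non-zero to make
 * the caller raise #DE, as the IEM_MC_IF_LOCAL_IS_Z branch shows.  An 8-bit
 * DIV restated under that convention as a standalone sketch (illustrative
 * names; the real workers are assembly and also leave the flags undefined):
 */
#if 0 /* illustrative sketch, not part of the build */
# include <stdint.h>

static int sketch_div_u8(uint16_t *puAX, uint8_t uDivisor, uint32_t *pfEFlags)
{
    (void)pfEFlags;                        /* DIV leaves the flags undefined */
    if (!uDivisor)
        return -1;                         /* divide by zero -> #DE */
    uint16_t const uQuotient  = (uint16_t)(*puAX / uDivisor);
    uint16_t const uRemainder = (uint16_t)(*puAX % uDivisor);
    if (uQuotient > 0xff)
        return -1;                         /* quotient overflow -> #DE as well */
    *puAX = (uint16_t)((uRemainder << 8) | uQuotient);  /* AH=remainder, AL=quotient */
    return 0;
}
#endif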
17365
17366
17367/** Opcode 0xf7 /4, /5, /6 and /7. */
17368FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
17369{
17370 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
17371
17372 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17373 {
17374 /* register access */
17376 switch (pVCpu->iem.s.enmEffOpSize)
17377 {
17378 case IEMMODE_16BIT:
17379 {
17380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17381 IEM_MC_BEGIN(4, 1);
17382 IEM_MC_ARG(uint16_t *, pu16AX, 0);
17383 IEM_MC_ARG(uint16_t *, pu16DX, 1);
17384 IEM_MC_ARG(uint16_t, u16Value, 2);
17385 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17386 IEM_MC_LOCAL(int32_t, rc);
17387
17388 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17389 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
17390 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
17391 IEM_MC_REF_EFLAGS(pEFlags);
17392 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
17393 IEM_MC_IF_LOCAL_IS_Z(rc) {
17394 IEM_MC_ADVANCE_RIP();
17395 } IEM_MC_ELSE() {
17396 IEM_MC_RAISE_DIVIDE_ERROR();
17397 } IEM_MC_ENDIF();
17398
17399 IEM_MC_END();
17400 return VINF_SUCCESS;
17401 }
17402
17403 case IEMMODE_32BIT:
17404 {
17405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17406 IEM_MC_BEGIN(4, 1);
17407 IEM_MC_ARG(uint32_t *, pu32AX, 0);
17408 IEM_MC_ARG(uint32_t *, pu32DX, 1);
17409 IEM_MC_ARG(uint32_t, u32Value, 2);
17410 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17411 IEM_MC_LOCAL(int32_t, rc);
17412
17413 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17414 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
17415 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
17416 IEM_MC_REF_EFLAGS(pEFlags);
17417 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
17418 IEM_MC_IF_LOCAL_IS_Z(rc) {
17419 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
17420 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
17421 IEM_MC_ADVANCE_RIP();
17422 } IEM_MC_ELSE() {
17423 IEM_MC_RAISE_DIVIDE_ERROR();
17424 } IEM_MC_ENDIF();
17425
17426 IEM_MC_END();
17427 return VINF_SUCCESS;
17428 }
17429
17430 case IEMMODE_64BIT:
17431 {
17432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17433 IEM_MC_BEGIN(4, 1);
17434 IEM_MC_ARG(uint64_t *, pu64AX, 0);
17435 IEM_MC_ARG(uint64_t *, pu64DX, 1);
17436 IEM_MC_ARG(uint64_t, u64Value, 2);
17437 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17438 IEM_MC_LOCAL(int32_t, rc);
17439
17440 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17441 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
17442 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
17443 IEM_MC_REF_EFLAGS(pEFlags);
17444 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
17445 IEM_MC_IF_LOCAL_IS_Z(rc) {
17446 IEM_MC_ADVANCE_RIP();
17447 } IEM_MC_ELSE() {
17448 IEM_MC_RAISE_DIVIDE_ERROR();
17449 } IEM_MC_ENDIF();
17450
17451 IEM_MC_END();
17452 return VINF_SUCCESS;
17453 }
17454
17455 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17456 }
17457 }
17458 else
17459 {
17460 /* memory access. */
17461 switch (pVCpu->iem.s.enmEffOpSize)
17462 {
17463 case IEMMODE_16BIT:
17464 {
17465 IEM_MC_BEGIN(4, 2);
17466 IEM_MC_ARG(uint16_t *, pu16AX, 0);
17467 IEM_MC_ARG(uint16_t *, pu16DX, 1);
17468 IEM_MC_ARG(uint16_t, u16Value, 2);
17469 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17470 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17471 IEM_MC_LOCAL(int32_t, rc);
17472
17473 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17474 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17475 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17476 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
17477 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
17478 IEM_MC_REF_EFLAGS(pEFlags);
17479 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
17480 IEM_MC_IF_LOCAL_IS_Z(rc) {
17481 IEM_MC_ADVANCE_RIP();
17482 } IEM_MC_ELSE() {
17483 IEM_MC_RAISE_DIVIDE_ERROR();
17484 } IEM_MC_ENDIF();
17485
17486 IEM_MC_END();
17487 return VINF_SUCCESS;
17488 }
17489
17490 case IEMMODE_32BIT:
17491 {
17492 IEM_MC_BEGIN(4, 2);
17493 IEM_MC_ARG(uint32_t *, pu32AX, 0);
17494 IEM_MC_ARG(uint32_t *, pu32DX, 1);
17495 IEM_MC_ARG(uint32_t, u32Value, 2);
17496 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17498 IEM_MC_LOCAL(int32_t, rc);
17499
17500 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17502 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17503 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
17504 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
17505 IEM_MC_REF_EFLAGS(pEFlags);
17506 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
17507 IEM_MC_IF_LOCAL_IS_Z(rc) {
17508 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
17509 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
17510 IEM_MC_ADVANCE_RIP();
17511 } IEM_MC_ELSE() {
17512 IEM_MC_RAISE_DIVIDE_ERROR();
17513 } IEM_MC_ENDIF();
17514
17515 IEM_MC_END();
17516 return VINF_SUCCESS;
17517 }
17518
17519 case IEMMODE_64BIT:
17520 {
17521 IEM_MC_BEGIN(4, 2);
17522 IEM_MC_ARG(uint64_t *, pu64AX, 0);
17523 IEM_MC_ARG(uint64_t *, pu64DX, 1);
17524 IEM_MC_ARG(uint64_t, u64Value, 2);
17525 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17526 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17527 IEM_MC_LOCAL(int32_t, rc);
17528
17529 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17531 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17532 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
17533 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
17534 IEM_MC_REF_EFLAGS(pEFlags);
17535 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
17536 IEM_MC_IF_LOCAL_IS_Z(rc) {
17537 IEM_MC_ADVANCE_RIP();
17538 } IEM_MC_ELSE() {
17539 IEM_MC_RAISE_DIVIDE_ERROR();
17540 } IEM_MC_ENDIF();
17541
17542 IEM_MC_END();
17543 return VINF_SUCCESS;
17544 }
17545
17546 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17547 }
17548 }
17549}
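
/*
 * Note: for MUL the product widens into xDX:xAX, and CF/OF report whether the
 * high half carries significant bits; that is also why the 32-bit paths above
 * must clear the high dwords of both output registers on success.  A 16-bit
 * standalone sketch (illustrative names, flag bits as in the TEST sketch
 * further up):
 */
#if 0 /* illustrative sketch, not part of the build */
# include <stdint.h>

static int sketch_mul_u16(uint16_t *puAX, uint16_t *puDX, uint16_t uSrc, uint32_t *pfEFlags)
{
    uint32_t const uProduct = (uint32_t)*puAX * uSrc;
    *puAX = (uint16_t)uProduct;
    *puDX = (uint16_t)(uProduct >> 16);
    if (*puDX)
        *pfEFlags |=   (1u << 0) | (1u << 11);   /* set CF and OF */
    else
        *pfEFlags &= ~((1u << 0) | (1u << 11));  /* clear CF and OF */
    return 0;                                    /* MUL never raises #DE */
}
#endif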
17550
17551/** Opcode 0xf6. */
17552FNIEMOP_DEF(iemOp_Grp3_Eb)
17553{
17554 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17555 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17556 {
17557 case 0:
17558 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
17559 case 1:
17560/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
17561 return IEMOP_RAISE_INVALID_OPCODE();
17562 case 2:
17563 IEMOP_MNEMONIC(not_Eb, "not Eb");
17564 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
17565 case 3:
17566 IEMOP_MNEMONIC(neg_Eb, "neg Eb");
17567 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
17568 case 4:
17569 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
17570 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
17571 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
17572 case 5:
17573 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
17574 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
17575 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
17576 case 6:
17577 IEMOP_MNEMONIC(div_Eb, "div Eb");
17578 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
17579 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
17580 case 7:
17581 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
17582 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
17583 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
17584 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17585 }
17586}
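
/*
 * Note: the "/0".."/7" notation in the opcode comments above is simply the
 * reg field of the ModR/M byte, which group opcodes reuse as an opcode
 * extension.  The field split, spelled out without the X86_MODRM_* macros:
 */
#if 0 /* illustrative sketch, not part of the build */
# include <stdint.h>

static unsigned sketch_modrm_mod(uint8_t bRm) { return bRm >> 6; }        /* 3 = register operand */
static unsigned sketch_modrm_reg(uint8_t bRm) { return (bRm >> 3) & 7; }  /* the /digit selector  */
static unsigned sketch_modrm_rm(uint8_t bRm)  { return bRm & 7; }         /* register/memory base */
#endif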
17587
17588
17589/** Opcode 0xf7. */
17590FNIEMOP_DEF(iemOp_Grp3_Ev)
17591{
17592 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17593 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17594 {
17595 case 0:
17596 return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
17597 case 1:
17598/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
17599 return IEMOP_RAISE_INVALID_OPCODE();
17600 case 2:
17601 IEMOP_MNEMONIC(not_Ev, "not Ev");
17602 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
17603 case 3:
17604 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
17605 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
17606 case 4:
17607 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
17608 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
17609 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
17610 case 5:
17611 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
17612 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
17613 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
17614 case 6:
17615 IEMOP_MNEMONIC(div_Ev, "div Ev");
17616 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
17617 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
17618 case 7:
17619 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
17620 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
17621 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
17622 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17623 }
17624}
17625
17626
17627/** Opcode 0xf8. */
17628FNIEMOP_DEF(iemOp_clc)
17629{
17630 IEMOP_MNEMONIC(clc, "clc");
17631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17632 IEM_MC_BEGIN(0, 0);
17633 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
17634 IEM_MC_ADVANCE_RIP();
17635 IEM_MC_END();
17636 return VINF_SUCCESS;
17637}
17638
17639
17640/** Opcode 0xf9. */
17641FNIEMOP_DEF(iemOp_stc)
17642{
17643 IEMOP_MNEMONIC(stc, "stc");
17644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17645 IEM_MC_BEGIN(0, 0);
17646 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
17647 IEM_MC_ADVANCE_RIP();
17648 IEM_MC_END();
17649 return VINF_SUCCESS;
17650}
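
/*
 * Note: IEM_MC_CLEAR_EFL_BIT and IEM_MC_SET_EFL_BIT reduce to plain bit
 * operations on the guest EFLAGS; CF is bit 0.  Standalone sketch:
 */
#if 0 /* illustrative sketch, not part of the build */
# include <stdint.h>

static void sketch_clc(uint32_t *pfEFlags) { *pfEFlags &= ~(1u << 0); }
static void sketch_stc(uint32_t *pfEFlags) { *pfEFlags |=  (1u << 0); }
#endif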
17651
17652
17653/** Opcode 0xfa. */
17654FNIEMOP_DEF(iemOp_cli)
17655{
17656 IEMOP_MNEMONIC(cli, "cli");
17657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17658 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
17659}
17660
17661
/** Opcode 0xfb. */
17662FNIEMOP_DEF(iemOp_sti)
17663{
17664 IEMOP_MNEMONIC(sti, "sti");
17665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17666 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
17667}
17668
17669
17670/** Opcode 0xfc. */
17671FNIEMOP_DEF(iemOp_cld)
17672{
17673 IEMOP_MNEMONIC(cld, "cld");
17674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17675 IEM_MC_BEGIN(0, 0);
17676 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
17677 IEM_MC_ADVANCE_RIP();
17678 IEM_MC_END();
17679 return VINF_SUCCESS;
17680}
17681
17682
17683/** Opcode 0xfd. */
17684FNIEMOP_DEF(iemOp_std)
17685{
17686 IEMOP_MNEMONIC(std, "std");
17687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17688 IEM_MC_BEGIN(0, 0);
17689 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
17690 IEM_MC_ADVANCE_RIP();
17691 IEM_MC_END();
17692 return VINF_SUCCESS;
17693}
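
/*
 * Note: DF (EFLAGS bit 10) only matters to the string instructions, which
 * advance their pointers by +size or -size depending on it.  The step
 * computation a movsb/stosb-style loop would use, as a standalone sketch:
 */
#if 0 /* illustrative sketch, not part of the build */
# include <stddef.h>
# include <stdint.h>

static ptrdiff_t sketch_string_step(uint32_t fEFlags, size_t cbElement)
{
    return (fEFlags & (1u << 10)) ? -(ptrdiff_t)cbElement : (ptrdiff_t)cbElement;
}
#endif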
17694
17695
17696/** Opcode 0xfe. */
17697FNIEMOP_DEF(iemOp_Grp4)
17698{
17699 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17700 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17701 {
17702 case 0:
17703 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
17704 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
17705 case 1:
17706 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
17707 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
17708 default:
17709 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
17710 return IEMOP_RAISE_INVALID_OPCODE();
17711 }
17712}
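
/*
 * Note: INC and DEC update the arithmetic flags like ADD/SUB with 1, with one
 * exception: CF is left untouched.  A trimmed 8-bit INC sketch showing just
 * that property (SF/ZF/OF updated; PF/AF omitted for brevity, see the TEST
 * sketch further up for PF; names illustrative):
 */
#if 0 /* illustrative sketch, not part of the build */
# include <stdint.h>

static void sketch_inc_u8(uint8_t *puDst, uint32_t *pfEFlags)
{
    uint8_t const uResult = (uint8_t)(*puDst + 1);
    uint32_t fEfl = *pfEFlags & ~((1u << 6) | (1u << 7) | (1u << 11)); /* CF (bit 0) kept as-is */
    if (!uResult)        fEfl |= 1u << 6;   /* ZF */
    if (uResult & 0x80)  fEfl |= 1u << 7;   /* SF */
    if (uResult == 0x80) fEfl |= 1u << 11;  /* OF: 0x7f wrapped to 0x80 */
    *puDst    = uResult;
    *pfEFlags = fEfl;
}
#endif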
17713
17714
17715/**
17716 * Opcode 0xff /2.
17717 * @param bRm The RM byte.
17718 */
17719FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
17720{
17721 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
17722 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17723
17724 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17725 {
17726 /* The new RIP is taken from a register. */
17727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17728 switch (pVCpu->iem.s.enmEffOpSize)
17729 {
17730 case IEMMODE_16BIT:
17731 IEM_MC_BEGIN(1, 0);
17732 IEM_MC_ARG(uint16_t, u16Target, 0);
17733 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17734 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
17735 IEM_MC_END()
17736 return VINF_SUCCESS;
17737
17738 case IEMMODE_32BIT:
17739 IEM_MC_BEGIN(1, 0);
17740 IEM_MC_ARG(uint32_t, u32Target, 0);
17741 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17742 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
17743 IEM_MC_END()
17744 return VINF_SUCCESS;
17745
17746 case IEMMODE_64BIT:
17747 IEM_MC_BEGIN(1, 0);
17748 IEM_MC_ARG(uint64_t, u64Target, 0);
17749 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17750 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
17751 IEM_MC_END()
17752 return VINF_SUCCESS;
17753
17754 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17755 }
17756 }
17757 else
17758 {
17759 /* The new RIP is taken from a memory location. */
17760 switch (pVCpu->iem.s.enmEffOpSize)
17761 {
17762 case IEMMODE_16BIT:
17763 IEM_MC_BEGIN(1, 1);
17764 IEM_MC_ARG(uint16_t, u16Target, 0);
17765 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17766 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17768 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17769 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
17770 IEM_MC_END()
17771 return VINF_SUCCESS;
17772
17773 case IEMMODE_32BIT:
17774 IEM_MC_BEGIN(1, 1);
17775 IEM_MC_ARG(uint32_t, u32Target, 0);
17776 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17777 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17779 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17780 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
17781 IEM_MC_END()
17782 return VINF_SUCCESS;
17783
17784 case IEMMODE_64BIT:
17785 IEM_MC_BEGIN(1, 1);
17786 IEM_MC_ARG(uint64_t, u64Target, 0);
17787 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17788 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17789 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17790 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17791 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
17792 IEM_MC_END()
17793 return VINF_SUCCESS;
17794
17795 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17796 }
17797 }
17798}
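
/*
 * Note: beneath all the mode and limit checking in iemCImpl_call_16/32/64, a
 * near indirect call is "push the address of the next instruction, then
 * branch".  A toy flat-memory 64-bit sketch (illustrative structure, no
 * canonicality or fault checks):
 */
#if 0 /* illustrative sketch, not part of the build */
# include <stdint.h>
# include <string.h>

typedef struct SKETCHCPU
{
    uint64_t uRip;      /* already advanced past the call instruction */
    uint64_t uRsp;
    uint8_t *pbMem;     /* flat guest memory for the sketch */
} SKETCHCPU;

static void sketch_call_near64(SKETCHCPU *pCpu, uint64_t uTarget)
{
    pCpu->uRsp -= sizeof(uint64_t);                           /* make room for the return address */
    memcpy(pCpu->pbMem + pCpu->uRsp, &pCpu->uRip, sizeof(pCpu->uRip));
    pCpu->uRip = uTarget;
}
#endif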
17799
17800typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
17801
17802FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
17803{
17804 /* Registers? How?? */
17805 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
17806 { /* likely */ }
17807 else
17808 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
17809
17810 /* Far pointer loaded from memory. */
17811 switch (pVCpu->iem.s.enmEffOpSize)
17812 {
17813 case IEMMODE_16BIT:
17814 IEM_MC_BEGIN(3, 1);
17815 IEM_MC_ARG(uint16_t, u16Sel, 0);
17816 IEM_MC_ARG(uint16_t, offSeg, 1);
17817 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17818 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17819 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17820 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17821 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17822 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
17823 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17824 IEM_MC_END();
17825 return VINF_SUCCESS;
17826
17827 case IEMMODE_64BIT:
17828 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
17829 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
17830 * and call far qword [rsp] encodings. */
17831 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
17832 {
17833 IEM_MC_BEGIN(3, 1);
17834 IEM_MC_ARG(uint16_t, u16Sel, 0);
17835 IEM_MC_ARG(uint64_t, offSeg, 1);
17836 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2);
17837 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17838 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17840 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17841 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
17842 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17843 IEM_MC_END();
17844 return VINF_SUCCESS;
17845 }
17846 /* AMD falls thru. */
17847
17848 case IEMMODE_32BIT:
17849 IEM_MC_BEGIN(3, 1);
17850 IEM_MC_ARG(uint16_t, u16Sel, 0);
17851 IEM_MC_ARG(uint32_t, offSeg, 1);
17852 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
17853 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17854 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17856 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17857 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
17858 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17859 IEM_MC_END();
17860 return VINF_SUCCESS;
17861
17862 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17863 }
17864}
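
/*
 * Note: the m16:16/m16:32/m16:64 far pointer operand is stored offset-first
 * with the selector last, which is why the code above fetches the selector
 * with IEM_MC_FETCH_MEM_U16_DISP at displacement 2, 4 or 8.  Toy reader for
 * the m16:32 shape (illustrative names):
 */
#if 0 /* illustrative sketch, not part of the build */
# include <stdint.h>
# include <string.h>

static void sketch_read_far_ptr32(const uint8_t *pb, uint16_t *puSel, uint32_t *puOff)
{
    memcpy(puOff, pb, sizeof(*puOff));                   /* 32-bit offset at +0 */
    memcpy(puSel, pb + sizeof(*puOff), sizeof(*puSel));  /* selector at +4 */
}
#endif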
17865
17866
17867/**
17868 * Opcode 0xff /3.
17869 * @param bRm The RM byte.
17870 */
17871FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
17872{
17873 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
17874 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
17875}
17876
17877
17878/**
17879 * Opcode 0xff /4.
17880 * @param bRm The RM byte.
17881 */
17882FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
17883{
17884 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
17885 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17886
17887 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17888 {
17889 /* The new RIP is taken from a register. */
17890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17891 switch (pVCpu->iem.s.enmEffOpSize)
17892 {
17893 case IEMMODE_16BIT:
17894 IEM_MC_BEGIN(0, 1);
17895 IEM_MC_LOCAL(uint16_t, u16Target);
17896 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17897 IEM_MC_SET_RIP_U16(u16Target);
17898 IEM_MC_END()
17899 return VINF_SUCCESS;
17900
17901 case IEMMODE_32BIT:
17902 IEM_MC_BEGIN(0, 1);
17903 IEM_MC_LOCAL(uint32_t, u32Target);
17904 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17905 IEM_MC_SET_RIP_U32(u32Target);
17906 IEM_MC_END()
17907 return VINF_SUCCESS;
17908
17909 case IEMMODE_64BIT:
17910 IEM_MC_BEGIN(0, 1);
17911 IEM_MC_LOCAL(uint64_t, u64Target);
17912 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17913 IEM_MC_SET_RIP_U64(u64Target);
17914 IEM_MC_END()
17915 return VINF_SUCCESS;
17916
17917 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17918 }
17919 }
17920 else
17921 {
17922 /* The new RIP is taken from a memory location. */
17923 switch (pVCpu->iem.s.enmEffOpSize)
17924 {
17925 case IEMMODE_16BIT:
17926 IEM_MC_BEGIN(0, 2);
17927 IEM_MC_LOCAL(uint16_t, u16Target);
17928 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17929 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17931 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17932 IEM_MC_SET_RIP_U16(u16Target);
17933 IEM_MC_END()
17934 return VINF_SUCCESS;
17935
17936 case IEMMODE_32BIT:
17937 IEM_MC_BEGIN(0, 2);
17938 IEM_MC_LOCAL(uint32_t, u32Target);
17939 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17940 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17941 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17942 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17943 IEM_MC_SET_RIP_U32(u32Target);
17944 IEM_MC_END()
17945 return VINF_SUCCESS;
17946
17947 case IEMMODE_64BIT:
17948 IEM_MC_BEGIN(0, 2);
17949 IEM_MC_LOCAL(uint64_t, u64Target);
17950 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17951 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17953 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17954 IEM_MC_SET_RIP_U64(u64Target);
17955 IEM_MC_END()
17956 return VINF_SUCCESS;
17957
17958 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17959 }
17960 }
17961}
17962
17963
17964/**
17965 * Opcode 0xff /5.
17966 * @param bRm The RM byte.
17967 */
17968FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
17969{
17970 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
17971 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
17972}
17973
17974
17975/**
17976 * Opcode 0xff /6.
17977 * @param bRm The RM byte.
17978 */
17979FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
17980{
17981 IEMOP_MNEMONIC(push_Ev, "push Ev");
17982
17983 /* Registers are handled by a common worker. */
17984 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17985 return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17986
17987 /* Memory we do here. */
17988 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17989 switch (pVCpu->iem.s.enmEffOpSize)
17990 {
17991 case IEMMODE_16BIT:
17992 IEM_MC_BEGIN(0, 2);
17993 IEM_MC_LOCAL(uint16_t, u16Src);
17994 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17995 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17997 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17998 IEM_MC_PUSH_U16(u16Src);
17999 IEM_MC_ADVANCE_RIP();
18000 IEM_MC_END();
18001 return VINF_SUCCESS;
18002
18003 case IEMMODE_32BIT:
18004 IEM_MC_BEGIN(0, 2);
18005 IEM_MC_LOCAL(uint32_t, u32Src);
18006 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18007 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18009 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18010 IEM_MC_PUSH_U32(u32Src);
18011 IEM_MC_ADVANCE_RIP();
18012 IEM_MC_END();
18013 return VINF_SUCCESS;
18014
18015 case IEMMODE_64BIT:
18016 IEM_MC_BEGIN(0, 2);
18017 IEM_MC_LOCAL(uint64_t, u64Src);
18018 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18019 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18021 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18022 IEM_MC_PUSH_U64(u64Src);
18023 IEM_MC_ADVANCE_RIP();
18024 IEM_MC_END();
18025 return VINF_SUCCESS;
18026
18027 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18028 }
18029}
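
/*
 * Note the ordering in the memory paths above: the operand is fetched before
 * the stack write, so "push word [esp]" pushes the value that was on top
 * before the push.  The stack write itself, as a toy flat-memory 16-bit
 * sketch (illustrative, no wrap-around or fault handling):
 */
#if 0 /* illustrative sketch, not part of the build */
# include <stdint.h>
# include <string.h>

static void sketch_push_u16(uint16_t *puSp, uint8_t *pbStack, uint16_t uValue)
{
    *puSp = (uint16_t)(*puSp - sizeof(uValue));        /* decrement first... */
    memcpy(pbStack + *puSp, &uValue, sizeof(uValue));  /* ...then store */
}
#endif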
18030
18031
18032/** Opcode 0xff. */
18033FNIEMOP_DEF(iemOp_Grp5)
18034{
18035 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
18036 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
18037 {
18038 case 0:
18039 IEMOP_MNEMONIC(inc_Ev, "inc Ev");
18040 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
18041 case 1:
18042 IEMOP_MNEMONIC(dec_Ev, "dec Ev");
18043 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
18044 case 2:
18045 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
18046 case 3:
18047 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
18048 case 4:
18049 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
18050 case 5:
18051 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
18052 case 6:
18053 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
18054 case 7:
18055 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
18056 return IEMOP_RAISE_INVALID_OPCODE();
18057 }
18058 AssertFailedReturn(VERR_IEM_IPE_3);
18059}
18060
18061
18062
18063const PFNIEMOP g_apfnOneByteMap[256] =
18064{
18065 /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
18066 /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
18067 /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
18068 /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
18069 /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
18070 /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
18071 /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
18072 /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
18073 /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
18074 /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
18075 /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
18076 /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
18077 /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
18078 /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
18079 /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
18080 /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
18081 /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
18082 /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
18083 /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
18084 /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
18085 /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
18086 /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
18087 /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
18088 /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
18089 /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
18090 /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
18091 /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
18092 /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
18093 /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
18094 /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
18095 /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
18096 /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
18097 /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
18098 /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
18099 /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
18100 /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
18101 /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
18102 /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
18103 /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
18104 /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
18105 /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
18106 /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
18107 /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
18108 /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
18109 /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
18110 /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
18111 /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
18112 /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
18113 /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
18114 /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
18115 /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
18116 /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
18117 /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
18118 /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
18119 /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
18120 /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
18121 /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
18122 /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
18123 /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
18124 /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
18125 /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
18126 /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
18127 /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
18128 /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
18129};
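
/*
 * Note: the decoder consumes this table by fetching one opcode byte and
 * indirecting through it; prefix bytes (lock, rep, segment and size
 * overrides) are ordinary entries that record state and fetch again.  A
 * sketch of the driving step in terms of this file's macros (assuming the
 * plain FNIEMOP_CALL form used by the outer decode loop):
 */
#if 0 /* illustrative sketch, not part of the build */
FNIEMOP_DEF(sketchDecodeOneByte)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
#endif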
18130
18131
18132/** @} */
18133
18134#ifdef _MSC_VER
18135# pragma warning(pop)
18136#endif