VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@65587

Last change on this file was r65587, checked in by vboxsync, 8 years ago

IEM: Changed IEMMODE from enum to uint8_t to save structure space. Prepping for VEX, EVEX and SSE decoder changes.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 634.4 KB
1/* $Id: IEMAllInstructions.cpp.h 65587 2017-02-02 12:35:27Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24#ifdef _MSC_VER
25# pragma warning(push)
26# pragma warning(disable: 4702) /* Unreachable code like return in iemOp_Grp6_lldt. */
27#endif
28
29
30/**
31 * Common worker for instructions like ADD, AND, OR, ++ with a byte
32 * memory/register as the destination.
33 *
34 * @param pImpl Pointer to the instruction implementation (assembly).
35 */
36FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
37{
38 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
39
40 /*
41 * If rm is denoting a register, no more instruction bytes.
42 */
43 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
44 {
45 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
46
47 IEM_MC_BEGIN(3, 0);
48 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
49 IEM_MC_ARG(uint8_t, u8Src, 1);
50 IEM_MC_ARG(uint32_t *, pEFlags, 2);
51
52 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
53 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
54 IEM_MC_REF_EFLAGS(pEFlags);
55 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
56
57 IEM_MC_ADVANCE_RIP();
58 IEM_MC_END();
59 }
60 else
61 {
62 /*
63 * We're accessing memory.
64 * Note! We're putting the eflags on the stack here so we can commit them
65 * after the memory.
66 */
67 uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
68 IEM_MC_BEGIN(3, 2);
69 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
70 IEM_MC_ARG(uint8_t, u8Src, 1);
71 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
72 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
73
74 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
75 if (!pImpl->pfnLockedU8)
76 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
77 IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
78 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
79 IEM_MC_FETCH_EFLAGS(EFlags);
80 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
81 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
82 else
83 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
84
85 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
86 IEM_MC_COMMIT_EFLAGS(EFlags);
87 IEM_MC_ADVANCE_RIP();
88 IEM_MC_END();
89 }
90 return VINF_SUCCESS;
91}
92
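/*
 * Editor's sketch (not part of the numbered file above): the ModR/M bit
 * fiddling these workers repeat, shown as plain C. The shifts and masks
 * mirror the X86_MODRM_* constants from x86.h; iemModRMDemo is a
 * hypothetical name.
 */
#include <stdint.h>
#include <stdio.h>

static void iemModRMDemo(uint8_t bRm)
{
    unsigned const iMod = (bRm >> 6) & 3; /* 11b (3) selects the register form. */
    unsigned const iReg = (bRm >> 3) & 7; /* 'reg': other operand, or /digit for groups. */
    unsigned const iRm  =  bRm       & 7; /* 'rm': register index or addressing mode. */
    printf("mod=%u reg=%u rm=%u -> %s operand\n",
           iMod, iReg, iRm, iMod == 3 ? "register" : "memory");
}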
93
94/**
95 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
96 * memory/register as the destination.
97 *
98 * @param pImpl Pointer to the instruction implementation (assembly).
99 */
100FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
101{
102 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
103
104 /*
105 * If rm is denoting a register, no more instruction bytes.
106 */
107 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
108 {
109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
110
111 switch (pVCpu->iem.s.enmEffOpSize)
112 {
113 case IEMMODE_16BIT:
114 IEM_MC_BEGIN(3, 0);
115 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
116 IEM_MC_ARG(uint16_t, u16Src, 1);
117 IEM_MC_ARG(uint32_t *, pEFlags, 2);
118
119 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
120 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
121 IEM_MC_REF_EFLAGS(pEFlags);
122 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
123
124 IEM_MC_ADVANCE_RIP();
125 IEM_MC_END();
126 break;
127
128 case IEMMODE_32BIT:
129 IEM_MC_BEGIN(3, 0);
130 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
131 IEM_MC_ARG(uint32_t, u32Src, 1);
132 IEM_MC_ARG(uint32_t *, pEFlags, 2);
133
134 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
135 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
136 IEM_MC_REF_EFLAGS(pEFlags);
137 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
138
139 if (pImpl != &g_iemAImpl_test)
140 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
141 IEM_MC_ADVANCE_RIP();
142 IEM_MC_END();
143 break;
144
145 case IEMMODE_64BIT:
146 IEM_MC_BEGIN(3, 0);
147 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
148 IEM_MC_ARG(uint64_t, u64Src, 1);
149 IEM_MC_ARG(uint32_t *, pEFlags, 2);
150
151 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
152 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
153 IEM_MC_REF_EFLAGS(pEFlags);
154 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
155
156 IEM_MC_ADVANCE_RIP();
157 IEM_MC_END();
158 break;
159 }
160 }
161 else
162 {
163 /*
164 * We're accessing memory.
165 * Note! We're putting the eflags on the stack here so we can commit them
166 * after the memory.
167 */
168 uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
169 switch (pVCpu->iem.s.enmEffOpSize)
170 {
171 case IEMMODE_16BIT:
172 IEM_MC_BEGIN(3, 2);
173 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
174 IEM_MC_ARG(uint16_t, u16Src, 1);
175 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
176 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
177
178 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
179 if (!pImpl->pfnLockedU16)
180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
181 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
182 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
183 IEM_MC_FETCH_EFLAGS(EFlags);
184 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
185 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
186 else
187 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
188
189 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
190 IEM_MC_COMMIT_EFLAGS(EFlags);
191 IEM_MC_ADVANCE_RIP();
192 IEM_MC_END();
193 break;
194
195 case IEMMODE_32BIT:
196 IEM_MC_BEGIN(3, 2);
197 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
198 IEM_MC_ARG(uint32_t, u32Src, 1);
199 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
200 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
201
202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
203 if (!pImpl->pfnLockedU32)
204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
205 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
206 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
207 IEM_MC_FETCH_EFLAGS(EFlags);
208 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
209 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
210 else
211 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
212
213 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
214 IEM_MC_COMMIT_EFLAGS(EFlags);
215 IEM_MC_ADVANCE_RIP();
216 IEM_MC_END();
217 break;
218
219 case IEMMODE_64BIT:
220 IEM_MC_BEGIN(3, 2);
221 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
222 IEM_MC_ARG(uint64_t, u64Src, 1);
223 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
225
226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
227 if (!pImpl->pfnLockedU64)
228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
229 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
230 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
231 IEM_MC_FETCH_EFLAGS(EFlags);
232 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
233 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
234 else
235 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
236
237 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
238 IEM_MC_COMMIT_EFLAGS(EFlags);
239 IEM_MC_ADVANCE_RIP();
240 IEM_MC_END();
241 break;
242 }
243 }
244 return VINF_SUCCESS;
245}
246
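/*
 * Editor's sketch (assumption, not original code): uRexReg/uRexB are stored
 * pre-shifted as 8 or 0 according to REX.R/REX.B, so OR-ing them onto the
 * 3-bit ModR/M fields -- as done throughout these workers -- yields the full
 * 4-bit register index in 64-bit mode. Hypothetical helper:
 */
#include <stdint.h>

static uint8_t iemDemoFullRegIndex(uint8_t bRm, uint8_t bRexPrefix)
{
    uint8_t const uRexReg = (bRexPrefix & 0x04 /* REX.R */) ? 8 : 0;
    return (uint8_t)(((bRm >> 3) & 7) | uRexReg); /* 0..15, e.g. RAX..R15. */
}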
247
248/**
249 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
250 * the destination.
251 *
252 * @param pImpl Pointer to the instruction implementation (assembly).
253 */
254FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
255{
256 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
257
258 /*
259 * If rm is denoting a register, no more instruction bytes.
260 */
261 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
262 {
263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
264 IEM_MC_BEGIN(3, 0);
265 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
266 IEM_MC_ARG(uint8_t, u8Src, 1);
267 IEM_MC_ARG(uint32_t *, pEFlags, 2);
268
269 IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
270 IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
271 IEM_MC_REF_EFLAGS(pEFlags);
272 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
273
274 IEM_MC_ADVANCE_RIP();
275 IEM_MC_END();
276 }
277 else
278 {
279 /*
280 * We're accessing memory.
281 */
282 IEM_MC_BEGIN(3, 1);
283 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
284 IEM_MC_ARG(uint8_t, u8Src, 1);
285 IEM_MC_ARG(uint32_t *, pEFlags, 2);
286 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
287
288 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
290 IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
291 IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
292 IEM_MC_REF_EFLAGS(pEFlags);
293 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
294
295 IEM_MC_ADVANCE_RIP();
296 IEM_MC_END();
297 }
298 return VINF_SUCCESS;
299}
300
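/*
 * Editor's sketch: the rm_r8 vs r8_rm worker pair above mirrors the x86
 * 'direction' bit (opcode bit 1): clear means Eb,Gb (the ModR/M rm operand
 * is the destination), set means Gb,Eb (the reg field is the destination).
 * Illustration for ADD, assuming opcode bytes 0x00/0x02:
 */
#include <stdint.h>

static const char *iemDemoAddForm(uint8_t bOpcode)
{
    return (bOpcode & 2) ? "add Gb,Eb - reg gets the result"
                         : "add Eb,Gb - rm gets the result";
}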
301
302/**
303 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
304 * register as the destination.
305 *
306 * @param pImpl Pointer to the instruction implementation (assembly).
307 */
308FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
309{
310 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
311
312 /*
313 * If rm is denoting a register, no more instruction bytes.
314 */
315 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
316 {
317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
318 switch (pVCpu->iem.s.enmEffOpSize)
319 {
320 case IEMMODE_16BIT:
321 IEM_MC_BEGIN(3, 0);
322 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
323 IEM_MC_ARG(uint16_t, u16Src, 1);
324 IEM_MC_ARG(uint32_t *, pEFlags, 2);
325
326 IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
327 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
328 IEM_MC_REF_EFLAGS(pEFlags);
329 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
330
331 IEM_MC_ADVANCE_RIP();
332 IEM_MC_END();
333 break;
334
335 case IEMMODE_32BIT:
336 IEM_MC_BEGIN(3, 0);
337 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
338 IEM_MC_ARG(uint32_t, u32Src, 1);
339 IEM_MC_ARG(uint32_t *, pEFlags, 2);
340
341 IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
342 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
343 IEM_MC_REF_EFLAGS(pEFlags);
344 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
345
346 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
347 IEM_MC_ADVANCE_RIP();
348 IEM_MC_END();
349 break;
350
351 case IEMMODE_64BIT:
352 IEM_MC_BEGIN(3, 0);
353 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
354 IEM_MC_ARG(uint64_t, u64Src, 1);
355 IEM_MC_ARG(uint32_t *, pEFlags, 2);
356
357 IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
358 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
359 IEM_MC_REF_EFLAGS(pEFlags);
360 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
361
362 IEM_MC_ADVANCE_RIP();
363 IEM_MC_END();
364 break;
365 }
366 }
367 else
368 {
369 /*
370 * We're accessing memory.
371 */
372 switch (pVCpu->iem.s.enmEffOpSize)
373 {
374 case IEMMODE_16BIT:
375 IEM_MC_BEGIN(3, 1);
376 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
377 IEM_MC_ARG(uint16_t, u16Src, 1);
378 IEM_MC_ARG(uint32_t *, pEFlags, 2);
379 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
380
381 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
383 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
384 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
385 IEM_MC_REF_EFLAGS(pEFlags);
386 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
387
388 IEM_MC_ADVANCE_RIP();
389 IEM_MC_END();
390 break;
391
392 case IEMMODE_32BIT:
393 IEM_MC_BEGIN(3, 1);
394 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
395 IEM_MC_ARG(uint32_t, u32Src, 1);
396 IEM_MC_ARG(uint32_t *, pEFlags, 2);
397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
398
399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
401 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
402 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
403 IEM_MC_REF_EFLAGS(pEFlags);
404 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
405
406 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
407 IEM_MC_ADVANCE_RIP();
408 IEM_MC_END();
409 break;
410
411 case IEMMODE_64BIT:
412 IEM_MC_BEGIN(3, 1);
413 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
414 IEM_MC_ARG(uint64_t, u64Src, 1);
415 IEM_MC_ARG(uint32_t *, pEFlags, 2);
416 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
417
418 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
420 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
421 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
422 IEM_MC_REF_EFLAGS(pEFlags);
423 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
424
425 IEM_MC_ADVANCE_RIP();
426 IEM_MC_END();
427 break;
428 }
429 }
430 return VINF_SUCCESS;
431}
432
433
434/**
435 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
436 * a byte immediate.
437 *
438 * @param pImpl Pointer to the instruction implementation (assembly).
439 */
440FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
441{
442 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
444
445 IEM_MC_BEGIN(3, 0);
446 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
447 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1);
448 IEM_MC_ARG(uint32_t *, pEFlags, 2);
449
450 IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
451 IEM_MC_REF_EFLAGS(pEFlags);
452 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
453
454 IEM_MC_ADVANCE_RIP();
455 IEM_MC_END();
456 return VINF_SUCCESS;
457}
458
459
460/**
461 * Common worker for instructions like ADD, AND, OR, ++ with working on
462 * AX/EAX/RAX with a word/dword immediate.
463 *
464 * @param pImpl Pointer to the instruction implementation (assembly).
465 */
466FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
467{
468 switch (pVCpu->iem.s.enmEffOpSize)
469 {
470 case IEMMODE_16BIT:
471 {
472 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
474
475 IEM_MC_BEGIN(3, 0);
476 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
477 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
478 IEM_MC_ARG(uint32_t *, pEFlags, 2);
479
480 IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
481 IEM_MC_REF_EFLAGS(pEFlags);
482 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
483
484 IEM_MC_ADVANCE_RIP();
485 IEM_MC_END();
486 return VINF_SUCCESS;
487 }
488
489 case IEMMODE_32BIT:
490 {
491 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
493
494 IEM_MC_BEGIN(3, 0);
495 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
496 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
497 IEM_MC_ARG(uint32_t *, pEFlags, 2);
498
499 IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
500 IEM_MC_REF_EFLAGS(pEFlags);
501 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
502
503 if (pImpl != &g_iemAImpl_test)
504 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
505 IEM_MC_ADVANCE_RIP();
506 IEM_MC_END();
507 return VINF_SUCCESS;
508 }
509
510 case IEMMODE_64BIT:
511 {
512 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
514
515 IEM_MC_BEGIN(3, 0);
516 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
517 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
518 IEM_MC_ARG(uint32_t *, pEFlags, 2);
519
520 IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
521 IEM_MC_REF_EFLAGS(pEFlags);
522 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
523
524 IEM_MC_ADVANCE_RIP();
525 IEM_MC_END();
526 return VINF_SUCCESS;
527 }
528
529 IEM_NOT_REACHED_DEFAULT_CASE_RET();
530 }
531}
532
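/*
 * Editor's sketch: the 64-bit case above fetches only a 32-bit immediate and
 * sign-extends it, matching the Iz operand form (these opcodes have no imm64
 * encoding). What IEM_OPCODE_GET_NEXT_S32_SX_U64 boils down to:
 */
#include <stdint.h>

static uint64_t iemDemoSx32To64(uint32_t u32Imm)
{
    return (uint64_t)(int64_t)(int32_t)u32Imm; /* 0x80000000 -> 0xffffffff80000000 */
}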
533
534/** Opcodes 0xf1, 0xd6. */
535FNIEMOP_DEF(iemOp_Invalid)
536{
537 IEMOP_MNEMONIC(Invalid, "Invalid");
538 return IEMOP_RAISE_INVALID_OPCODE();
539}
540
541
542/** Invalid with RM byte. */
543FNIEMOPRM_DEF(iemOp_InvalidWithRM)
544{
545 RT_NOREF_PV(bRm);
546 IEMOP_MNEMONIC(InvalidWithRm, "InvalidWithRM");
547 return IEMOP_RAISE_INVALID_OPCODE();
548}
549
550
551
552/** @name ..... opcodes.
553 *
554 * @{
555 */
556
557/** @} */
558
559
560/** @name Two byte opcodes (first byte 0x0f).
561 *
562 * @{
563 */
564
565/** Opcode 0x0f 0x00 /0. */
566FNIEMOPRM_DEF(iemOp_Grp6_sldt)
567{
568 IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
569 IEMOP_HLP_MIN_286();
570 IEMOP_HLP_NO_REAL_OR_V86_MODE();
571
572 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
573 {
574 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
575 switch (pVCpu->iem.s.enmEffOpSize)
576 {
577 case IEMMODE_16BIT:
578 IEM_MC_BEGIN(0, 1);
579 IEM_MC_LOCAL(uint16_t, u16Ldtr);
580 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
581 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
582 IEM_MC_ADVANCE_RIP();
583 IEM_MC_END();
584 break;
585
586 case IEMMODE_32BIT:
587 IEM_MC_BEGIN(0, 1);
588 IEM_MC_LOCAL(uint32_t, u32Ldtr);
589 IEM_MC_FETCH_LDTR_U32(u32Ldtr);
590 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
591 IEM_MC_ADVANCE_RIP();
592 IEM_MC_END();
593 break;
594
595 case IEMMODE_64BIT:
596 IEM_MC_BEGIN(0, 1);
597 IEM_MC_LOCAL(uint64_t, u64Ldtr);
598 IEM_MC_FETCH_LDTR_U64(u64Ldtr);
599 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
600 IEM_MC_ADVANCE_RIP();
601 IEM_MC_END();
602 break;
603
604 IEM_NOT_REACHED_DEFAULT_CASE_RET();
605 }
606 }
607 else
608 {
609 IEM_MC_BEGIN(0, 2);
610 IEM_MC_LOCAL(uint16_t, u16Ldtr);
611 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
612 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
613 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
614 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
615 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
616 IEM_MC_ADVANCE_RIP();
617 IEM_MC_END();
618 }
619 return VINF_SUCCESS;
620}
621
622
623/** Opcode 0x0f 0x00 /1. */
624FNIEMOPRM_DEF(iemOp_Grp6_str)
625{
626 IEMOP_MNEMONIC(str, "str Rv/Mw");
627 IEMOP_HLP_MIN_286();
628 IEMOP_HLP_NO_REAL_OR_V86_MODE();
629
630 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
631 {
632 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
633 switch (pVCpu->iem.s.enmEffOpSize)
634 {
635 case IEMMODE_16BIT:
636 IEM_MC_BEGIN(0, 1);
637 IEM_MC_LOCAL(uint16_t, u16Tr);
638 IEM_MC_FETCH_TR_U16(u16Tr);
639 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
640 IEM_MC_ADVANCE_RIP();
641 IEM_MC_END();
642 break;
643
644 case IEMMODE_32BIT:
645 IEM_MC_BEGIN(0, 1);
646 IEM_MC_LOCAL(uint32_t, u32Tr);
647 IEM_MC_FETCH_TR_U32(u32Tr);
648 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
649 IEM_MC_ADVANCE_RIP();
650 IEM_MC_END();
651 break;
652
653 case IEMMODE_64BIT:
654 IEM_MC_BEGIN(0, 1);
655 IEM_MC_LOCAL(uint64_t, u64Tr);
656 IEM_MC_FETCH_TR_U64(u64Tr);
657 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
658 IEM_MC_ADVANCE_RIP();
659 IEM_MC_END();
660 break;
661
662 IEM_NOT_REACHED_DEFAULT_CASE_RET();
663 }
664 }
665 else
666 {
667 IEM_MC_BEGIN(0, 2);
668 IEM_MC_LOCAL(uint16_t, u16Tr);
669 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
670 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
671 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
672 IEM_MC_FETCH_TR_U16(u16Tr);
673 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
674 IEM_MC_ADVANCE_RIP();
675 IEM_MC_END();
676 }
677 return VINF_SUCCESS;
678}
679
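/*
 * Editor's sketch: the "Rv/Mw" in the sldt/str mnemonics means the register
 * form scales with the effective operand size while the memory form always
 * stores exactly one word, which is why only the register path above has an
 * operand-size switch. Hypothetical summary:
 */
static unsigned iemDemoSldtStoreBits(int fRegisterForm, unsigned cEffOpSizeBits)
{
    return fRegisterForm ? cEffOpSizeBits /* 16/32/64 */ : 16 /* Mw */;
}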
680
681/** Opcode 0x0f 0x00 /2. */
682FNIEMOPRM_DEF(iemOp_Grp6_lldt)
683{
684 IEMOP_MNEMONIC(lldt, "lldt Ew");
685 IEMOP_HLP_MIN_286();
686 IEMOP_HLP_NO_REAL_OR_V86_MODE();
687
688 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
689 {
690 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
691 IEM_MC_BEGIN(1, 0);
692 IEM_MC_ARG(uint16_t, u16Sel, 0);
693 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
694 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
695 IEM_MC_END();
696 }
697 else
698 {
699 IEM_MC_BEGIN(1, 1);
700 IEM_MC_ARG(uint16_t, u16Sel, 0);
701 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
702 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
703 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
704 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
705 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
706 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
707 IEM_MC_END();
708 }
709 return VINF_SUCCESS;
710}
711
712
713/** Opcode 0x0f 0x00 /3. */
714FNIEMOPRM_DEF(iemOp_Grp6_ltr)
715{
716 IEMOP_MNEMONIC(ltr, "ltr Ew");
717 IEMOP_HLP_MIN_286();
718 IEMOP_HLP_NO_REAL_OR_V86_MODE();
719
720 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
721 {
722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
723 IEM_MC_BEGIN(1, 0);
724 IEM_MC_ARG(uint16_t, u16Sel, 0);
725 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
726 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
727 IEM_MC_END();
728 }
729 else
730 {
731 IEM_MC_BEGIN(1, 1);
732 IEM_MC_ARG(uint16_t, u16Sel, 0);
733 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
734 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
736 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
737 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
738 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
739 IEM_MC_END();
740 }
741 return VINF_SUCCESS;
742}
743
744
745/** Common worker for opcode 0x0f 0x00 /4 and /5 (verr, verw). */
746FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
747{
748 IEMOP_HLP_MIN_286();
749 IEMOP_HLP_NO_REAL_OR_V86_MODE();
750
751 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
752 {
753 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
754 IEM_MC_BEGIN(2, 0);
755 IEM_MC_ARG(uint16_t, u16Sel, 0);
756 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
757 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
758 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
759 IEM_MC_END();
760 }
761 else
762 {
763 IEM_MC_BEGIN(2, 1);
764 IEM_MC_ARG(uint16_t, u16Sel, 0);
765 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
766 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
767 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
768 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
769 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
770 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
771 IEM_MC_END();
772 }
773 return VINF_SUCCESS;
774}
775
776
777/** Opcode 0x0f 0x00 /4. */
778FNIEMOPRM_DEF(iemOp_Grp6_verr)
779{
780 IEMOP_MNEMONIC(verr, "verr Ew");
781 IEMOP_HLP_MIN_286();
782 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
783}
784
785
786/** Opcode 0x0f 0x00 /5. */
787FNIEMOPRM_DEF(iemOp_Grp6_verw)
788{
789 IEMOP_MNEMONIC(verw, "verw Ew");
790 IEMOP_HLP_MIN_286();
791 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
792}
793
794
795/**
796 * Group 6 jump table.
797 */
798IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
799{
800 iemOp_Grp6_sldt,
801 iemOp_Grp6_str,
802 iemOp_Grp6_lldt,
803 iemOp_Grp6_ltr,
804 iemOp_Grp6_verr,
805 iemOp_Grp6_verw,
806 iemOp_InvalidWithRM,
807 iemOp_InvalidWithRM
808};
809
810/** Opcode 0x0f 0x00. */
811FNIEMOP_DEF(iemOp_Grp6)
812{
813 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
814 return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
815}
816
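/*
 * Editor's sketch: group opcodes dispatch on the ModR/M 'reg' field (/0../7),
 * exactly as iemOp_Grp6 does via g_apfnGroup6 above. Generic shape with
 * hypothetical names:
 */
#include <stdint.h>

typedef int (*PFNDEMOHANDLER)(uint8_t bRm);

static int iemDemoDispatchGroup(PFNDEMOHANDLER const papfn[8], uint8_t bRm)
{
    return papfn[(bRm >> 3) & 7](bRm); /* the reg field picks the handler */
}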
817
818/** Opcode 0x0f 0x01 /0. */
819FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
820{
821 IEMOP_MNEMONIC(sgdt, "sgdt Ms");
822 IEMOP_HLP_MIN_286();
823 IEMOP_HLP_64BIT_OP_SIZE();
824 IEM_MC_BEGIN(2, 1);
825 IEM_MC_ARG(uint8_t, iEffSeg, 0);
826 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
827 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
829 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
830 IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
831 IEM_MC_END();
832 return VINF_SUCCESS;
833}
834
835
836/** Opcode 0x0f 0x01 0xc1 (vmcall). */
837FNIEMOP_DEF(iemOp_Grp7_vmcall)
838{
839 IEMOP_BITCH_ABOUT_STUB();
840 return IEMOP_RAISE_INVALID_OPCODE();
841}
842
843
844/** Opcode 0x0f 0x01 0xc2 (vmlaunch). */
845FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
846{
847 IEMOP_BITCH_ABOUT_STUB();
848 return IEMOP_RAISE_INVALID_OPCODE();
849}
850
851
852/** Opcode 0x0f 0x01 0xc3 (vmresume). */
853FNIEMOP_DEF(iemOp_Grp7_vmresume)
854{
855 IEMOP_BITCH_ABOUT_STUB();
856 return IEMOP_RAISE_INVALID_OPCODE();
857}
858
859
860/** Opcode 0x0f 0x01 0xc4 (vmxoff). */
861FNIEMOP_DEF(iemOp_Grp7_vmxoff)
862{
863 IEMOP_BITCH_ABOUT_STUB();
864 return IEMOP_RAISE_INVALID_OPCODE();
865}
866
867
868/** Opcode 0x0f 0x01 /1. */
869FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
870{
871 IEMOP_MNEMONIC(sidt, "sidt Ms");
872 IEMOP_HLP_MIN_286();
873 IEMOP_HLP_64BIT_OP_SIZE();
874 IEM_MC_BEGIN(2, 1);
875 IEM_MC_ARG(uint8_t, iEffSeg, 0);
876 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
877 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
879 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
880 IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
881 IEM_MC_END();
882 return VINF_SUCCESS;
883}
884
885
886/** Opcode 0x0f 0x01 0xc8 (monitor). */
887FNIEMOP_DEF(iemOp_Grp7_monitor)
888{
889 IEMOP_MNEMONIC(monitor, "monitor");
890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
891 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
892}
893
894
895/** Opcode 0x0f 0x01 0xc9 (mwait). */
896FNIEMOP_DEF(iemOp_Grp7_mwait)
897{
898 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
900 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
901}
902
903
904/** Opcode 0x0f 0x01 /2. */
905FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
906{
907 IEMOP_MNEMONIC(lgdt, "lgdt");
908 IEMOP_HLP_64BIT_OP_SIZE();
909 IEM_MC_BEGIN(3, 1);
910 IEM_MC_ARG(uint8_t, iEffSeg, 0);
911 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
912 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
913 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
915 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
916 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
917 IEM_MC_END();
918 return VINF_SUCCESS;
919}
920
921
922/** Opcode 0x0f 0x01 0xd0. */
923FNIEMOP_DEF(iemOp_Grp7_xgetbv)
924{
925 IEMOP_MNEMONIC(xgetbv, "xgetbv");
926 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
927 {
928 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
929 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
930 }
931 return IEMOP_RAISE_INVALID_OPCODE();
932}
933
934
935/** Opcode 0x0f 0x01 0xd1. */
936FNIEMOP_DEF(iemOp_Grp7_xsetbv)
937{
938 IEMOP_MNEMONIC(xsetbv, "xsetbv");
939 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
940 {
941 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
942 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
943 }
944 return IEMOP_RAISE_INVALID_OPCODE();
945}
946
947
948/** Opcode 0x0f 0x01 /3. */
949FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
950{
951 IEMOP_MNEMONIC(lidt, "lidt");
952 IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
953 ? IEMMODE_64BIT
954 : pVCpu->iem.s.enmEffOpSize;
955 IEM_MC_BEGIN(3, 1);
956 IEM_MC_ARG(uint8_t, iEffSeg, 0);
957 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
958 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
959 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
961 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
962 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
963 IEM_MC_END();
964 return VINF_SUCCESS;
965}
966
967
968/** Opcode 0x0f 0x01 0xd8. */
969FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
970
971/** Opcode 0x0f 0x01 0xd9. */
972FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);
973
974/** Opcode 0x0f 0x01 0xda. */
975FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
976
977/** Opcode 0x0f 0x01 0xdb. */
978FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
979
980/** Opcode 0x0f 0x01 0xdc. */
981FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
982
983/** Opcode 0x0f 0x01 0xdd. */
984FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
985
986/** Opcode 0x0f 0x01 0xde. */
987FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
988
989/** Opcode 0x0f 0x01 0xdf. */
990FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
991
992/** Opcode 0x0f 0x01 /4. */
993FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
994{
995 IEMOP_MNEMONIC(smsw, "smsw");
996 IEMOP_HLP_MIN_286();
997 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
998 {
999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1000 switch (pVCpu->iem.s.enmEffOpSize)
1001 {
1002 case IEMMODE_16BIT:
1003 IEM_MC_BEGIN(0, 1);
1004 IEM_MC_LOCAL(uint16_t, u16Tmp);
1005 IEM_MC_FETCH_CR0_U16(u16Tmp);
1006 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
1007 { /* likely */ }
1008 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
1009 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1010 else
1011 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1012 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
1013 IEM_MC_ADVANCE_RIP();
1014 IEM_MC_END();
1015 return VINF_SUCCESS;
1016
1017 case IEMMODE_32BIT:
1018 IEM_MC_BEGIN(0, 1);
1019 IEM_MC_LOCAL(uint32_t, u32Tmp);
1020 IEM_MC_FETCH_CR0_U32(u32Tmp);
1021 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
1022 IEM_MC_ADVANCE_RIP();
1023 IEM_MC_END();
1024 return VINF_SUCCESS;
1025
1026 case IEMMODE_64BIT:
1027 IEM_MC_BEGIN(0, 1);
1028 IEM_MC_LOCAL(uint64_t, u64Tmp);
1029 IEM_MC_FETCH_CR0_U64(u64Tmp);
1030 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
1031 IEM_MC_ADVANCE_RIP();
1032 IEM_MC_END();
1033 return VINF_SUCCESS;
1034
1035 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1036 }
1037 }
1038 else
1039 {
1040 /* Ignore operand size here, memory refs are always 16-bit. */
1041 IEM_MC_BEGIN(0, 2);
1042 IEM_MC_LOCAL(uint16_t, u16Tmp);
1043 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1044 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1046 IEM_MC_FETCH_CR0_U16(u16Tmp);
1047 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
1048 { /* likely */ }
1049 else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
1050 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1051 else
1052 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1053 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
1054 IEM_MC_ADVANCE_RIP();
1055 IEM_MC_END();
1056 return VINF_SUCCESS;
1057 }
1058}
1059
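/*
 * Editor's sketch: the masks OR'ed into u16Tmp above reproduce how the
 * emulation fills the undefined high MSW bits on older target CPUs;
 * distilled (hypothetical helper, CPU given as a plain number):
 */
#include <stdint.h>

static uint16_t iemDemoSmswHighBits(unsigned uTargetCpu)
{
    if (uTargetCpu > 386)  return 0;      /* 486+: CR0 low word as-is */
    if (uTargetCpu == 386) return 0xffe0; /* 386 */
    return 0xfff0;                        /* 286 */
}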
1060
1061/** Opcode 0x0f 0x01 /6. */
1062FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
1063{
1064 /* The operand size is effectively ignored; everything is 16-bit and only
1065 the lower four bits (PE, MP, EM, TS) are used. */
1066 IEMOP_MNEMONIC(lmsw, "lmsw");
1067 IEMOP_HLP_MIN_286();
1068 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1069 {
1070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1071 IEM_MC_BEGIN(1, 0);
1072 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1073 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1074 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
1075 IEM_MC_END();
1076 }
1077 else
1078 {
1079 IEM_MC_BEGIN(1, 1);
1080 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1081 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1082 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1084 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
1085 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
1086 IEM_MC_END();
1087 }
1088 return VINF_SUCCESS;
1089}
1090
1091
1092/** Opcode 0x0f 0x01 /7. */
1093FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
1094{
1095 IEMOP_MNEMONIC(invlpg, "invlpg");
1096 IEMOP_HLP_MIN_486();
1097 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1098 IEM_MC_BEGIN(1, 1);
1099 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
1100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1101 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
1102 IEM_MC_END();
1103 return VINF_SUCCESS;
1104}
1105
1106
1107/** Opcode 0x0f 0x01 0xf8 (swapgs). */
1108FNIEMOP_DEF(iemOp_Grp7_swapgs)
1109{
1110 IEMOP_MNEMONIC(swapgs, "swapgs");
1111 IEMOP_HLP_ONLY_64BIT();
1112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1113 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
1114}
1115
1116
1117/** Opcode 0x0f 0x01 0xf9 (rdtscp). */
1118FNIEMOP_DEF(iemOp_Grp7_rdtscp)
1119{
1120 NOREF(pVCpu);
1121 IEMOP_BITCH_ABOUT_STUB();
1122 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
1123}
1124
1125
1126/** Opcode 0x0f 0x01. */
1127FNIEMOP_DEF(iemOp_Grp7)
1128{
1129 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1130 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1131 {
1132 case 0:
1133 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1134 return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
1135 switch (bRm & X86_MODRM_RM_MASK)
1136 {
1137 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1138 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1139 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1140 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1141 }
1142 return IEMOP_RAISE_INVALID_OPCODE();
1143
1144 case 1:
1145 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1146 return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
1147 switch (bRm & X86_MODRM_RM_MASK)
1148 {
1149 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1150 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1151 }
1152 return IEMOP_RAISE_INVALID_OPCODE();
1153
1154 case 2:
1155 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1156 return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
1157 switch (bRm & X86_MODRM_RM_MASK)
1158 {
1159 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1160 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1161 }
1162 return IEMOP_RAISE_INVALID_OPCODE();
1163
1164 case 3:
1165 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1166 return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
1167 switch (bRm & X86_MODRM_RM_MASK)
1168 {
1169 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1170 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1171 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1172 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1173 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1174 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1175 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1176 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1177 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1178 }
1179
1180 case 4:
1181 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1182
1183 case 5:
1184 return IEMOP_RAISE_INVALID_OPCODE();
1185
1186 case 6:
1187 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1188
1189 case 7:
1190 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1191 return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
1192 switch (bRm & X86_MODRM_RM_MASK)
1193 {
1194 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1195 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1196 }
1197 return IEMOP_RAISE_INVALID_OPCODE();
1198
1199 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1200 }
1201}
1202
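/*
 * Editor's sketch: with mod=11b the Grp7 'rm' field selects an instruction
 * rather than a register, so the whole ModR/M byte names it: mod=3, reg=1,
 * rm=0 gives 0xc8 (monitor); reg=2, rm=0 gives 0xd0 (xgetbv); reg=7, rm=0
 * gives 0xf8 (swapgs). Byte construction:
 */
#include <stdint.h>

static uint8_t iemDemoGrp7ModRM(uint8_t iReg, uint8_t iRm)
{
    return (uint8_t)((3 << 6) | ((iReg & 7) << 3) | (iRm & 7));
}
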
1203/** Common worker for opcodes 0x0f 0x02 (lar) and 0x0f 0x03 (lsl). */
1204FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1205{
1206 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1207 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1208
1209 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1210 {
1211 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1212 switch (pVCpu->iem.s.enmEffOpSize)
1213 {
1214 case IEMMODE_16BIT:
1215 {
1216 IEM_MC_BEGIN(3, 0);
1217 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1218 IEM_MC_ARG(uint16_t, u16Sel, 1);
1219 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1220
1221 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1222 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1223 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1224
1225 IEM_MC_END();
1226 return VINF_SUCCESS;
1227 }
1228
1229 case IEMMODE_32BIT:
1230 case IEMMODE_64BIT:
1231 {
1232 IEM_MC_BEGIN(3, 0);
1233 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1234 IEM_MC_ARG(uint16_t, u16Sel, 1);
1235 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1236
1237 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1238 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1239 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1240
1241 IEM_MC_END();
1242 return VINF_SUCCESS;
1243 }
1244
1245 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1246 }
1247 }
1248 else
1249 {
1250 switch (pVCpu->iem.s.enmEffOpSize)
1251 {
1252 case IEMMODE_16BIT:
1253 {
1254 IEM_MC_BEGIN(3, 1);
1255 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1256 IEM_MC_ARG(uint16_t, u16Sel, 1);
1257 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1258 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1259
1260 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1261 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1262
1263 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1264 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1265 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1266
1267 IEM_MC_END();
1268 return VINF_SUCCESS;
1269 }
1270
1271 case IEMMODE_32BIT:
1272 case IEMMODE_64BIT:
1273 {
1274 IEM_MC_BEGIN(3, 1);
1275 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1276 IEM_MC_ARG(uint16_t, u16Sel, 1);
1277 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1278 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1279
1280 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1281 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1282/** @todo testcase: make sure it's a 16-bit read. */
1283
1284 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1285 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1286 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1287
1288 IEM_MC_END();
1289 return VINF_SUCCESS;
1290 }
1291
1292 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1293 }
1294 }
1295}
1296
1297
1298
1299/** Opcode 0x0f 0x02. */
1300FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1301{
1302 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1303 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1304}
1305
1306
1307/** Opcode 0x0f 0x03. */
1308FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1309{
1310 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1311 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1312}
1313
1314
1315/** Opcode 0x0f 0x05. */
1316FNIEMOP_DEF(iemOp_syscall)
1317{
1318 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1320 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
1321}
1322
1323
1324/** Opcode 0x0f 0x06. */
1325FNIEMOP_DEF(iemOp_clts)
1326{
1327 IEMOP_MNEMONIC(clts, "clts");
1328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1329 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
1330}
1331
1332
1333/** Opcode 0x0f 0x07. */
1334FNIEMOP_DEF(iemOp_sysret)
1335{
1336 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1338 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
1339}
1340
1341
1342/** Opcode 0x0f 0x08. */
1343FNIEMOP_STUB(iemOp_invd);
1344// IEMOP_HLP_MIN_486();
1345
1346
1347/** Opcode 0x0f 0x09. */
1348FNIEMOP_DEF(iemOp_wbinvd)
1349{
1350 IEMOP_MNEMONIC(wbinvd, "wbinvd");
1351 IEMOP_HLP_MIN_486();
1352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1353 IEM_MC_BEGIN(0, 0);
1354 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
1355 IEM_MC_ADVANCE_RIP();
1356 IEM_MC_END();
1357 return VINF_SUCCESS; /* ignore for now */
1358}
1359
1360
1361/** Opcode 0x0f 0x0b. */
1362FNIEMOP_DEF(iemOp_ud2)
1363{
1364 IEMOP_MNEMONIC(ud2, "ud2");
1365 return IEMOP_RAISE_INVALID_OPCODE();
1366}
1367
1368/** Opcode 0x0f 0x0d. */
1369FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
1370{
1371 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
1372 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
1373 {
1374 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
1375 return IEMOP_RAISE_INVALID_OPCODE();
1376 }
1377
1378 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1379 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1380 {
1381 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
1382 return IEMOP_RAISE_INVALID_OPCODE();
1383 }
1384
1385 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1386 {
1387 case 2: /* Aliased to /0 for the time being. */
1388 case 4: /* Aliased to /0 for the time being. */
1389 case 5: /* Aliased to /0 for the time being. */
1390 case 6: /* Aliased to /0 for the time being. */
1391 case 7: /* Aliased to /0 for the time being. */
1392 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
1393 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
1394 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
1395 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1396 }
1397
1398 IEM_MC_BEGIN(0, 1);
1399 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1400 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1402 /* Currently a NOP. */
1403 NOREF(GCPtrEffSrc);
1404 IEM_MC_ADVANCE_RIP();
1405 IEM_MC_END();
1406 return VINF_SUCCESS;
1407}
1408
1409
1410/** Opcode 0x0f 0x0e. */
1411FNIEMOP_STUB(iemOp_femms);
1412
1413
1414/** Opcode 0x0f 0x0f 0x0c. */
1415FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);
1416
1417/** Opcode 0x0f 0x0f 0x0d. */
1418FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);
1419
1420/** Opcode 0x0f 0x0f 0x1c. */
1421FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);
1422
1423/** Opcode 0x0f 0x0f 0x1d. */
1424FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);
1425
1426/** Opcode 0x0f 0x0f 0x8a. */
1427FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);
1428
1429/** Opcode 0x0f 0x0f 0x8e. */
1430FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);
1431
1432/** Opcode 0x0f 0x0f 0x90. */
1433FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);
1434
1435/** Opcode 0x0f 0x0f 0x94. */
1436FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);
1437
1438/** Opcode 0x0f 0x0f 0x96. */
1439FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);
1440
1441/** Opcode 0x0f 0x0f 0x97. */
1442FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);
1443
1444/** Opcode 0x0f 0x0f 0x9a. */
1445FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);
1446
1447/** Opcode 0x0f 0x0f 0x9e. */
1448FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);
1449
1450/** Opcode 0x0f 0x0f 0xa0. */
1451FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);
1452
1453/** Opcode 0x0f 0x0f 0xa4. */
1454FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);
1455
1456/** Opcode 0x0f 0x0f 0xa6. */
1457FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);
1458
1459/** Opcode 0x0f 0x0f 0xa7. */
1460FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);
1461
1462/** Opcode 0x0f 0x0f 0xaa. */
1463FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);
1464
1465/** Opcode 0x0f 0x0f 0xae. */
1466FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);
1467
1468/** Opcode 0x0f 0x0f 0xb0. */
1469FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);
1470
1471/** Opcode 0x0f 0x0f 0xb4. */
1472FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);
1473
1474/** Opcode 0x0f 0x0f 0xb6. */
1475FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);
1476
1477/** Opcode 0x0f 0x0f 0xb7. */
1478FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);
1479
1480/** Opcode 0x0f 0x0f 0xbb. */
1481FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);
1482
1483/** Opcode 0x0f 0x0f 0xbf. */
1484FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1485
1486
1487/** Opcode 0x0f 0x0f. */
1488FNIEMOP_DEF(iemOp_3Dnow)
1489{
1490 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
1491 {
1492 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
1493 return IEMOP_RAISE_INVALID_OPCODE();
1494 }
1495
1496 /* This is pretty sparse, use switch instead of table. */
1497 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1498 switch (b)
1499 {
1500 case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
1501 case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
1502 case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
1503 case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
1504 case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
1505 case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
1506 case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
1507 case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
1508 case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
1509 case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
1510 case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
1511 case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
1512 case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
1513 case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
1514 case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
1515 case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
1516 case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
1517 case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
1518 case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
1519 case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
1520 case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
1521 case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
1522 case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
1523 case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
1524 default:
1525 return IEMOP_RAISE_INVALID_OPCODE();
1526 }
1527}
1528
1529
1530/** Opcode 0x0f 0x10. */
1531FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);
1532
1533
1534/** Opcode 0x0f 0x11. */
1535FNIEMOP_DEF(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd)
1536{
1537 /* Quick hack. Need to restructure all of this later some time. */
1538 uint32_t const fRelevantPrefix = pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ);
1539 if (fRelevantPrefix == 0)
1540 {
1541 IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
1542 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1543 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1544 {
1545 /*
1546 * Register, register.
1547 */
1548 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1549 IEM_MC_BEGIN(0, 0);
1550 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1551 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1552 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1553 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1554 IEM_MC_ADVANCE_RIP();
1555 IEM_MC_END();
1556 }
1557 else
1558 {
1559 /*
1560 * Memory, register.
1561 */
1562 IEM_MC_BEGIN(0, 2);
1563 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1564 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1565
1566 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1567 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
1568 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1569 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1570
1571 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1572 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1573
1574 IEM_MC_ADVANCE_RIP();
1575 IEM_MC_END();
1576 }
1577 }
1578 else if (fRelevantPrefix == IEM_OP_PRF_REPNZ)
1579 {
1580 IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
1581 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1582 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1583 {
1584 /*
1585 * Register, register.
1586 */
1587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1588 IEM_MC_BEGIN(0, 1);
1589 IEM_MC_LOCAL(uint64_t, uSrc);
1590
1591 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1592 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1593 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1594 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1595
1596 IEM_MC_ADVANCE_RIP();
1597 IEM_MC_END();
1598 }
1599 else
1600 {
1601 /*
1602 * Memory, register.
1603 */
1604 IEM_MC_BEGIN(0, 2);
1605 IEM_MC_LOCAL(uint64_t, uSrc);
1606 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1607
1608 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1610 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1611 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1612
1613 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1614 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1615
1616 IEM_MC_ADVANCE_RIP();
1617 IEM_MC_END();
1618 }
1619 }
1620 else
1621 {
1622 IEMOP_BITCH_ABOUT_STUB();
1623 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
1624 }
1625 return VINF_SUCCESS;
1626}
1627
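/*
 * Editor's sketch: the fRelevantPrefix test above implements the standard SSE
 * form selection for 0x0f 0x10/0x11 -- no prefix: movups, 0x66: movupd,
 * 0xf3 (REPZ): movss, 0xf2 (REPNZ): movsd. The else branch is the not-yet-
 * implemented movupd/movss pair. As a plain mapping:
 */
static const char *iemDemoSseFormName(int fSizePrefix, int fRepz, int fRepnz)
{
    if (fRepnz)      return "movsd";
    if (fRepz)       return "movss";
    if (fSizePrefix) return "movupd";
    return "movups";
}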
1628
1629/** Opcode 0x0f 0x12. */
1630FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq); //NEXT
1631
1632
1633/** Opcode 0x0f 0x13. */
1634FNIEMOP_DEF(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq)
1635{
1636 /* Quick hack. Need to restructure all of this later some time. */
1637 if (pVCpu->iem.s.fPrefixes == IEM_OP_PRF_SIZE_OP)
1638 {
1639 IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
1640 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1641 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1642 {
1643#if 0
1644 /*
1645 * Register, register.
1646 */
1647 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1648 IEM_MC_BEGIN(0, 1);
1649 IEM_MC_LOCAL(uint64_t, uSrc);
1650 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1651 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1652 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1653 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1654 IEM_MC_ADVANCE_RIP();
1655 IEM_MC_END();
1656#else
1657 return IEMOP_RAISE_INVALID_OPCODE();
1658#endif
1659 }
1660 else
1661 {
1662 /*
1663 * Memory, register.
1664 */
1665 IEM_MC_BEGIN(0, 2);
1666 IEM_MC_LOCAL(uint64_t, uSrc);
1667 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1668
1669 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1670 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
1671 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1672 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1673
1674 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1675 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1676
1677 IEM_MC_ADVANCE_RIP();
1678 IEM_MC_END();
1679 }
1680 return VINF_SUCCESS;
1681 }
1682
1683 IEMOP_BITCH_ABOUT_STUB();
1684 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
1685}
1686
1687
1688/** Opcode 0x0f 0x14. */
1689FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
1690/** Opcode 0x0f 0x15. */
1691FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
1692/** Opcode 0x0f 0x16. */
1693FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq); //NEXT
1694/** Opcode 0x0f 0x17. */
1695FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq); //NEXT
1696
1697
1698/** Opcode 0x0f 0x18. */
1699FNIEMOP_DEF(iemOp_prefetch_Grp16)
1700{
1701 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1702 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1703 {
1704 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1705 {
1706 case 4: /* Aliased to /0 for the time being according to AMD. */
1707 case 5: /* Aliased to /0 for the time being according to AMD. */
1708 case 6: /* Aliased to /0 for the time being according to AMD. */
1709 case 7: /* Aliased to /0 for the time being according to AMD. */
1710 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
1711 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
1712 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
1713 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
1714 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1715 }
1716
1717 IEM_MC_BEGIN(0, 1);
1718 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1719 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1720 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1721 /* Currently a NOP. */
1722 NOREF(GCPtrEffSrc);
1723 IEM_MC_ADVANCE_RIP();
1724 IEM_MC_END();
1725 return VINF_SUCCESS;
1726 }
1727
1728 return IEMOP_RAISE_INVALID_OPCODE();
1729}
1730
1731
1732/** Opcode 0x0f 0x19..0x1f. */
1733FNIEMOP_DEF(iemOp_nop_Ev)
1734{
1735 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
1736 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1737 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1738 {
1739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1740 IEM_MC_BEGIN(0, 0);
1741 IEM_MC_ADVANCE_RIP();
1742 IEM_MC_END();
1743 }
1744 else
1745 {
1746 IEM_MC_BEGIN(0, 1);
1747 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1750 /* Currently a NOP. */
1751 NOREF(GCPtrEffSrc);
1752 IEM_MC_ADVANCE_RIP();
1753 IEM_MC_END();
1754 }
1755 return VINF_SUCCESS;
1756}
1757
1758
1759/** Opcode 0x0f 0x20. */
1760FNIEMOP_DEF(iemOp_mov_Rd_Cd)
1761{
1762 /* mod is ignored, as are operand size overrides. */
1763 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
1764 IEMOP_HLP_MIN_386();
1765 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1766 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1767 else
1768 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1769
1770 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1771 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1772 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1773 {
1774 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1775 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1776 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1777 iCrReg |= 8;
1778 }
1779 switch (iCrReg)
1780 {
1781 case 0: case 2: case 3: case 4: case 8:
1782 break;
1783 default:
1784 return IEMOP_RAISE_INVALID_OPCODE();
1785 }
1786 IEMOP_HLP_DONE_DECODING();
1787
1788 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
1789}
1790
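/*
 * Editor's sketch: the iCrReg |= 8 above is the AMD alternative encoding --
 * a LOCK prefix on mov to/from CRx promotes the control register index by 8,
 * letting 32-bit code reach CR8 (the TPR). Hypothetical helper:
 */
#include <stdbool.h>
#include <stdint.h>

static uint8_t iemDemoCrIndex(uint8_t iRegField, bool fLockPrefix)
{
    return fLockPrefix ? (uint8_t)(iRegField | 8) : iRegField; /* reg=0 + LOCK -> CR8 */
}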
1791
1792/** Opcode 0x0f 0x21. */
1793FNIEMOP_DEF(iemOp_mov_Rd_Dd)
1794{
1795 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
1796 IEMOP_HLP_MIN_386();
1797 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1798 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1799 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1800 return IEMOP_RAISE_INVALID_OPCODE();
1801 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
1802 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
1803 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
1804}
1805
1806
1807/** Opcode 0x0f 0x22. */
1808FNIEMOP_DEF(iemOp_mov_Cd_Rd)
1809{
1810 /* mod is ignored, as are operand size overrides. */
1811 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
1812 IEMOP_HLP_MIN_386();
1813 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1814 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1815 else
1816 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1817
1818 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1819 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1820 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1821 {
1822 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1823 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1824 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1825 iCrReg |= 8;
1826 }
1827 switch (iCrReg)
1828 {
1829 case 0: case 2: case 3: case 4: case 8:
1830 break;
1831 default:
1832 return IEMOP_RAISE_INVALID_OPCODE();
1833 }
1834 IEMOP_HLP_DONE_DECODING();
1835
1836 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1837}
1838
1839
1840/** Opcode 0x0f 0x23. */
1841FNIEMOP_DEF(iemOp_mov_Dd_Rd)
1842{
1843 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
1844 IEMOP_HLP_MIN_386();
1845 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1847 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1848 return IEMOP_RAISE_INVALID_OPCODE();
1849 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
1850 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
1851 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1852}
1853
1854
1855/** Opcode 0x0f 0x24. */
1856FNIEMOP_DEF(iemOp_mov_Rd_Td)
1857{
1858 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
1859 /** @todo works on 386 and 486. */
1860 /* The RM byte is not considered, see testcase. */
1861 return IEMOP_RAISE_INVALID_OPCODE();
1862}
1863
1864
1865/** Opcode 0x0f 0x26. */
1866FNIEMOP_DEF(iemOp_mov_Td_Rd)
1867{
1868 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
1869 /** @todo works on 386 and 486. */
1870 /* The RM byte is not considered, see testcase. */
1871 return IEMOP_RAISE_INVALID_OPCODE();
1872}
1873
1874
1875/** Opcode 0x0f 0x28. */
1876FNIEMOP_DEF(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd)
1877{
1878 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1879 IEMOP_MNEMONIC(movaps_r_mr, "movaps Vps,Wps");
1880 else
1881 IEMOP_MNEMONIC(movapd_r_mr, "movapd Vpd,Wpd");
1882 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1883 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1884 {
1885 /*
1886 * Register, register.
1887 */
1888 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1889 IEM_MC_BEGIN(0, 0);
1890 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1891 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1892 else
1893 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1894 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1895 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1896 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1897 IEM_MC_ADVANCE_RIP();
1898 IEM_MC_END();
1899 }
1900 else
1901 {
1902 /*
1903 * Register, memory.
1904 */
1905 IEM_MC_BEGIN(0, 2);
1906 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1907 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1908
1909 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1910 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
1911 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1912 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1913 else
1914 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1915 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1916
1917 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1918 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1919
1920 IEM_MC_ADVANCE_RIP();
1921 IEM_MC_END();
1922 }
1923 return VINF_SUCCESS;
1924}
1925
1926
1927/** Opcode 0x0f 0x29. */
1928FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
1929{
1930 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1931 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
1932 else
1933 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
1934 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1935 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1936 {
1937 /*
1938 * Register, register.
1939 */
1940 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1941 IEM_MC_BEGIN(0, 0);
1942 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1943 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1944 else
1945 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1946 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1947 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1948 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1949 IEM_MC_ADVANCE_RIP();
1950 IEM_MC_END();
1951 }
1952 else
1953 {
1954 /*
1955 * Memory, register.
1956 */
1957 IEM_MC_BEGIN(0, 2);
1958 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1959 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1960
1961 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1962 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
1963 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1964 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1965 else
1966 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1967 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1968
1969 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1970 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1971
1972 IEM_MC_ADVANCE_RIP();
1973 IEM_MC_END();
1974 }
1975 return VINF_SUCCESS;
1976}
1977
1978
1979/** Opcode 0x0f 0x2a. */
1980FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey); //NEXT
1981
1982
1983/** Opcode 0x0f 0x2b. */
1984FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
1985{
1986 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1987 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
1988 else
1989 IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mpd,Vpd");
1990 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1991 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1992 {
1993 /*
1994 * memory, register.
1995 */
1996 IEM_MC_BEGIN(0, 2);
1997 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1998 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1999
2000 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2001 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
2002 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2003 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2004 else
2005 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2006 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2007
2008 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2009 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2010
2011 IEM_MC_ADVANCE_RIP();
2012 IEM_MC_END();
2013 }
2014 /* The register, register encoding is invalid. */
2015 else
2016 return IEMOP_RAISE_INVALID_OPCODE();
2017 return VINF_SUCCESS;
2018}
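
/*
 * Editor's note: movntps/movntpd are non-temporal store hints; the code above
 * performs a plain 128-bit aligned store and ignores the caching hint, which
 * is permissible since the hint is advisory.
 */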
2019
2020
2021/** Opcode 0x0f 0x2c. */
2022FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Gy_Wsd); //NEXT
2023/** Opcode 0x0f 0x2d. */
2024FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_Ppi_Wpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
2025/** Opcode 0x0f 0x2e. */
2026FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd); //NEXT
2027/** Opcode 0x0f 0x2f. */
2028FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
2029
2030
2031/** Opcode 0x0f 0x30. */
2032FNIEMOP_DEF(iemOp_wrmsr)
2033{
2034 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2035 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2036 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2037}
2038
2039
2040/** Opcode 0x0f 0x31. */
2041FNIEMOP_DEF(iemOp_rdtsc)
2042{
2043 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2045 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2046}
2047
2048
2049/** Opcode 0x0f 0x32. */
2050FNIEMOP_DEF(iemOp_rdmsr)
2051{
2052 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2054 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2055}
2056
2057
2058/** Opcode 0x0f 0x33. */
2059FNIEMOP_STUB(iemOp_rdpmc);
2060/** Opcode 0x0f 0x34. */
2061FNIEMOP_STUB(iemOp_sysenter);
2062/** Opcode 0x0f 0x35. */
2063FNIEMOP_STUB(iemOp_sysexit);
2064/** Opcode 0x0f 0x37. */
2065FNIEMOP_STUB(iemOp_getsec);
2066/** Opcode 0x0f 0x38. */
2067FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
2068/** Opcode 0x0f 0x3a. */
2069FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
2070
2071
2072/**
2073 * Implements a conditional move.
2074 *
2075 * Wish there was an obvious way to do this where we could share and reduce
2076 * code bloat.
2077 *
2078 * @param a_Cnd The conditional "microcode" operation.
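 *
 * Note (editor's addition): the 32-bit cases have an IEM_MC_ELSE() arm that
 * clears the high dword of the destination even when the condition is false,
 * matching the documented CMOV behaviour of always zero extending 32-bit
 * results in 64-bit mode regardless of whether the move takes place.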
2079 */
2080#define CMOV_X(a_Cnd) \
2081 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2082 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2083 { \
2084 switch (pVCpu->iem.s.enmEffOpSize) \
2085 { \
2086 case IEMMODE_16BIT: \
2087 IEM_MC_BEGIN(0, 1); \
2088 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2089 a_Cnd { \
2090 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2091 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2092 } IEM_MC_ENDIF(); \
2093 IEM_MC_ADVANCE_RIP(); \
2094 IEM_MC_END(); \
2095 return VINF_SUCCESS; \
2096 \
2097 case IEMMODE_32BIT: \
2098 IEM_MC_BEGIN(0, 1); \
2099 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2100 a_Cnd { \
2101 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2102 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2103 } IEM_MC_ELSE() { \
2104 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2105 } IEM_MC_ENDIF(); \
2106 IEM_MC_ADVANCE_RIP(); \
2107 IEM_MC_END(); \
2108 return VINF_SUCCESS; \
2109 \
2110 case IEMMODE_64BIT: \
2111 IEM_MC_BEGIN(0, 1); \
2112 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2113 a_Cnd { \
2114 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2115 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2116 } IEM_MC_ENDIF(); \
2117 IEM_MC_ADVANCE_RIP(); \
2118 IEM_MC_END(); \
2119 return VINF_SUCCESS; \
2120 \
2121 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2122 } \
2123 } \
2124 else \
2125 { \
2126 switch (pVCpu->iem.s.enmEffOpSize) \
2127 { \
2128 case IEMMODE_16BIT: \
2129 IEM_MC_BEGIN(0, 2); \
2130 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2131 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2133 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2134 a_Cnd { \
2135 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2136 } IEM_MC_ENDIF(); \
2137 IEM_MC_ADVANCE_RIP(); \
2138 IEM_MC_END(); \
2139 return VINF_SUCCESS; \
2140 \
2141 case IEMMODE_32BIT: \
2142 IEM_MC_BEGIN(0, 2); \
2143 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2144 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2145 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2146 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2147 a_Cnd { \
2148 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2149 } IEM_MC_ELSE() { \
2150 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2151 } IEM_MC_ENDIF(); \
2152 IEM_MC_ADVANCE_RIP(); \
2153 IEM_MC_END(); \
2154 return VINF_SUCCESS; \
2155 \
2156 case IEMMODE_64BIT: \
2157 IEM_MC_BEGIN(0, 2); \
2158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2159 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2161 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2162 a_Cnd { \
2163 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2164 } IEM_MC_ENDIF(); \
2165 IEM_MC_ADVANCE_RIP(); \
2166 IEM_MC_END(); \
2167 return VINF_SUCCESS; \
2168 \
2169 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2170 } \
2171 } do {} while (0)
2172
2173
2174
2175/** Opcode 0x0f 0x40. */
2176FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2177{
2178 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2179 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2180}
2181
2182
2183/** Opcode 0x0f 0x41. */
2184FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2185{
2186 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2187 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2188}
2189
2190
2191/** Opcode 0x0f 0x42. */
2192FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2193{
2194 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2195 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2196}
2197
2198
2199/** Opcode 0x0f 0x43. */
2200FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2201{
2202 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2203 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2204}
2205
2206
2207/** Opcode 0x0f 0x44. */
2208FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2209{
2210 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2211 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2212}
2213
2214
2215/** Opcode 0x0f 0x45. */
2216FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2217{
2218 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2219 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2220}
2221
2222
2223/** Opcode 0x0f 0x46. */
2224FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2225{
2226 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2227 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2228}
2229
2230
2231/** Opcode 0x0f 0x47. */
2232FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2233{
2234 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2235 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2236}
2237
2238
2239/** Opcode 0x0f 0x48. */
2240FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2241{
2242 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2243 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2244}
2245
2246
2247/** Opcode 0x0f 0x49. */
2248FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2249{
2250 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2251 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2252}
2253
2254
2255/** Opcode 0x0f 0x4a. */
2256FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2257{
2258 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2259 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2260}
2261
2262
2263/** Opcode 0x0f 0x4b. */
2264FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2265{
2266 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2267 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2268}
2269
2270
2271/** Opcode 0x0f 0x4c. */
2272FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2273{
2274 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2275 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2276}
2277
2278
2279/** Opcode 0x0f 0x4d. */
2280FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2281{
2282 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2283 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2284}
2285
2286
2287/** Opcode 0x0f 0x4e. */
2288FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2289{
2290 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2291 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2292}
2293
2294
2295/** Opcode 0x0f 0x4f. */
2296FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2297{
2298 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2299 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2300}
2301
2302#undef CMOV_X
2303
2304/** Opcode 0x0f 0x50. */
2305FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
2306/** Opcode 0x0f 0x51. */
2307FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
2308/** Opcode 0x0f 0x52. */
2309FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
2310/** Opcode 0x0f 0x53. */
2311FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
2312/** Opcode 0x0f 0x54. */
2313FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
2314/** Opcode 0x0f 0x55. */
2315FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
2316/** Opcode 0x0f 0x56. */
2317FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
2318/** Opcode 0x0f 0x57. */
2319FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
2320/** Opcode 0x0f 0x58. */
2321FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd); //NEXT
2322/** Opcode 0x0f 0x59. */
2323FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);//NEXT
2324/** Opcode 0x0f 0x5a. */
2325FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
2326/** Opcode 0x0f 0x5b. */
2327FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvttps2dq_Vdq_Wps);
2328/** Opcode 0x0f 0x5c. */
2329FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
2330/** Opcode 0x0f 0x5d. */
2331FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
2332/** Opcode 0x0f 0x5e. */
2333FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
2334/** Opcode 0x0f 0x5f. */
2335FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
2336
2337
2338/**
2339 * Common worker for SSE2 and MMX instructions on the forms:
2340 * pxxxx xmm1, xmm2/mem128
2341 * pxxxx mm1, mm2/mem32
2342 *
2343 * The 2nd operand is the first half of a register, which in the memory case
2344 * means a 32-bit memory access for MMX, and for SSE a 128-bit aligned access
2345 * where either 64 bits or the full 128 bits may be read.
2346 *
2347 * Exceptions type 4.
2348 */
2349FNIEMOP_DEF_1(iemOpCommonMmxSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2350{
2351 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2352 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2353 {
2354 case IEM_OP_PRF_SIZE_OP: /* SSE */
2355 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2356 {
2357 /*
2358 * Register, register.
2359 */
2360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2361 IEM_MC_BEGIN(2, 0);
2362 IEM_MC_ARG(uint128_t *, pDst, 0);
2363 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2364 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2365 IEM_MC_PREPARE_SSE_USAGE();
2366 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2367 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2368 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2369 IEM_MC_ADVANCE_RIP();
2370 IEM_MC_END();
2371 }
2372 else
2373 {
2374 /*
2375 * Register, memory.
2376 */
2377 IEM_MC_BEGIN(2, 2);
2378 IEM_MC_ARG(uint128_t *, pDst, 0);
2379 IEM_MC_LOCAL(uint64_t, uSrc);
2380 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2381 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2382
2383 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2385 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2386 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2387
2388 IEM_MC_PREPARE_SSE_USAGE();
2389 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2390 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2391
2392 IEM_MC_ADVANCE_RIP();
2393 IEM_MC_END();
2394 }
2395 return VINF_SUCCESS;
2396
2397 case 0: /* MMX */
2398 if (!pImpl->pfnU64)
2399 return IEMOP_RAISE_INVALID_OPCODE();
2400 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2401 {
2402 /*
2403 * Register, register.
2404 */
2405 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2406 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2408 IEM_MC_BEGIN(2, 0);
2409 IEM_MC_ARG(uint64_t *, pDst, 0);
2410 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2411 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2412 IEM_MC_PREPARE_FPU_USAGE();
2413 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2414 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2415 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2416 IEM_MC_ADVANCE_RIP();
2417 IEM_MC_END();
2418 }
2419 else
2420 {
2421 /*
2422 * Register, memory.
2423 */
2424 IEM_MC_BEGIN(2, 2);
2425 IEM_MC_ARG(uint64_t *, pDst, 0);
2426 IEM_MC_LOCAL(uint32_t, uSrc);
2427 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2428 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2429
2430 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2432 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2433 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2434
2435 IEM_MC_PREPARE_FPU_USAGE();
2436 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2437 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2438
2439 IEM_MC_ADVANCE_RIP();
2440 IEM_MC_END();
2441 }
2442 return VINF_SUCCESS;
2443
2444 default:
2445 return IEMOP_RAISE_INVALID_OPCODE();
2446 }
2447}
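
/*
 * Editor's sketch of the prefix dispatch above: 0f 60 c1 decodes as
 * punpcklbw mm0, mm1 (no prefix, MMX path), while 66 0f 60 c1 decodes as
 * punpcklbw xmm0, xmm1 (operand size prefix, SSE path); F3/F2 prefixed forms
 * fall into the default case and raise #UD.
 */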
2448
2449
2450/** Opcode 0x0f 0x60. */
2451FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq)
2452{
2453 IEMOP_MNEMONIC(punpcklbw, "punpcklbw");
2454 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2455}
2456
2457
2458/** Opcode 0x0f 0x61. */
2459FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq)
2460{
2461 IEMOP_MNEMONIC(punpcklwd, "punpcklwd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID is required. */
2462 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2463}
2464
2465
2466/** Opcode 0x0f 0x62. */
2467FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq)
2468{
2469 IEMOP_MNEMONIC(punpckldq, "punpckldq");
2470 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2471}
2472
2473
2474/** Opcode 0x0f 0x63. */
2475FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
2476/** Opcode 0x0f 0x64. */
2477FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
2478/** Opcode 0x0f 0x65. */
2479FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
2480/** Opcode 0x0f 0x66. */
2481FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
2482/** Opcode 0x0f 0x67. */
2483FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
2484
2485
2486/**
2487 * Common worker for SSE2 and MMX instructions on the forms:
2488 * pxxxx xmm1, xmm2/mem128
2489 * pxxxx mm1, mm2/mem64
2490 *
2491 * The 2nd operand is the second half of a register, which in the memory case
2492 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2493 * where it may read the full 128 bits or only the upper 64 bits.
2494 *
2495 * Exceptions type 4.
2496 */
2497FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2498{
2499 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2500 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2501 {
2502 case IEM_OP_PRF_SIZE_OP: /* SSE */
2503 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2504 {
2505 /*
2506 * Register, register.
2507 */
2508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2509 IEM_MC_BEGIN(2, 0);
2510 IEM_MC_ARG(uint128_t *, pDst, 0);
2511 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2512 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2513 IEM_MC_PREPARE_SSE_USAGE();
2514 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2515 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2516 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2517 IEM_MC_ADVANCE_RIP();
2518 IEM_MC_END();
2519 }
2520 else
2521 {
2522 /*
2523 * Register, memory.
2524 */
2525 IEM_MC_BEGIN(2, 2);
2526 IEM_MC_ARG(uint128_t *, pDst, 0);
2527 IEM_MC_LOCAL(uint128_t, uSrc);
2528 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2529 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2530
2531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2533 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2534 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2535
2536 IEM_MC_PREPARE_SSE_USAGE();
2537 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2538 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2539
2540 IEM_MC_ADVANCE_RIP();
2541 IEM_MC_END();
2542 }
2543 return VINF_SUCCESS;
2544
2545 case 0: /* MMX */
2546 if (!pImpl->pfnU64)
2547 return IEMOP_RAISE_INVALID_OPCODE();
2548 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2549 {
2550 /*
2551 * Register, register.
2552 */
2553 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2554 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2556 IEM_MC_BEGIN(2, 0);
2557 IEM_MC_ARG(uint64_t *, pDst, 0);
2558 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2559 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2560 IEM_MC_PREPARE_FPU_USAGE();
2561 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2562 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2563 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2564 IEM_MC_ADVANCE_RIP();
2565 IEM_MC_END();
2566 }
2567 else
2568 {
2569 /*
2570 * Register, memory.
2571 */
2572 IEM_MC_BEGIN(2, 2);
2573 IEM_MC_ARG(uint64_t *, pDst, 0);
2574 IEM_MC_LOCAL(uint64_t, uSrc);
2575 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2576 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2577
2578 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2580 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2581 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2582
2583 IEM_MC_PREPARE_FPU_USAGE();
2584 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2585 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2586
2587 IEM_MC_ADVANCE_RIP();
2588 IEM_MC_END();
2589 }
2590 return VINF_SUCCESS;
2591
2592 default:
2593 return IEMOP_RAISE_INVALID_OPCODE();
2594 }
2595}
2596
2597
2598/** Opcode 0x0f 0x68. */
2599FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
2600{
2601 IEMOP_MNEMONIC(punpckhbw, "punpckhbw");
2602 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2603}
2604
2605
2606/** Opcode 0x0f 0x69. */
2607FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
2608{
2609 IEMOP_MNEMONIC(punpckhwd, "punpckhwd");
2610 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2611}
2612
2613
2614/** Opcode 0x0f 0x6a. */
2615FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
2616{
2617 IEMOP_MNEMONIC(punpckhdq, "punpckhdq");
2618 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2619}
2620
2621/** Opcode 0x0f 0x6b. */
2622FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdw_Vdq_Wdq);
2623
2624
2625/** Opcode 0x0f 0x6c. */
2626FNIEMOP_DEF(iemOp_punpcklqdq_Vdq_Wdq)
2627{
2628 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq");
2629 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2630}
2631
2632
2633/** Opcode 0x0f 0x6d. */
2634FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
2635{
2636 IEMOP_MNEMONIC(punpckhqdq, "punpckhqdq");
2637 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
2638}
2639
2640
2641/** Opcode 0x0f 0x6e. */
2642FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
2643{
2644 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2645 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2646 {
2647 case IEM_OP_PRF_SIZE_OP: /* SSE */
2648 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2649 IEMOP_MNEMONIC(movdq_Wq_Eq, "movq Wq,Eq");
2650 else
2651 IEMOP_MNEMONIC(movdq_Wd_Ed, "movd Wd,Ed");
2652 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2653 {
2654 /* XMM, greg*/
2655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2656 IEM_MC_BEGIN(0, 1);
2657 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2658 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2659 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2660 {
2661 IEM_MC_LOCAL(uint64_t, u64Tmp);
2662 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2663 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2664 }
2665 else
2666 {
2667 IEM_MC_LOCAL(uint32_t, u32Tmp);
2668 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2669 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2670 }
2671 IEM_MC_ADVANCE_RIP();
2672 IEM_MC_END();
2673 }
2674 else
2675 {
2676 /* XMM, [mem] */
2677 IEM_MC_BEGIN(0, 2);
2678 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2679 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
2680 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2682 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2683 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2684 {
2685 IEM_MC_LOCAL(uint64_t, u64Tmp);
2686 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2687 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2688 }
2689 else
2690 {
2691 IEM_MC_LOCAL(uint32_t, u32Tmp);
2692 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2693 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2694 }
2695 IEM_MC_ADVANCE_RIP();
2696 IEM_MC_END();
2697 }
2698 return VINF_SUCCESS;
2699
2700 case 0: /* MMX */
2701 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2702 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
2703 else
2704 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
2705 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2706 {
2707 /* MMX, greg */
2708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2709 IEM_MC_BEGIN(0, 1);
2710 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2711 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2712 IEM_MC_LOCAL(uint64_t, u64Tmp);
2713 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2714 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2715 else
2716 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2717 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2718 IEM_MC_ADVANCE_RIP();
2719 IEM_MC_END();
2720 }
2721 else
2722 {
2723 /* MMX, [mem] */
2724 IEM_MC_BEGIN(0, 2);
2725 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2726 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2727 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2729 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2730 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2731 {
2732 IEM_MC_LOCAL(uint64_t, u64Tmp);
2733 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2734 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2735 }
2736 else
2737 {
2738 IEM_MC_LOCAL(uint32_t, u32Tmp);
2739 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2740 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
2741 }
2742 IEM_MC_ADVANCE_RIP();
2743 IEM_MC_END();
2744 }
2745 return VINF_SUCCESS;
2746
2747 default:
2748 return IEMOP_RAISE_INVALID_OPCODE();
2749 }
2750}
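
/*
 * Editor's examples (assumed encodings): 66 0f 6e c0 is movd xmm0, eax; with
 * REX.W, 66 48 0f 6e c0 becomes movq xmm0, rax; the unprefixed 0f 6e c0 is
 * the MMX form movd mm0, eax.
 */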
2751
2752
2753/** Opcode 0x0f 0x6f. */
2754FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
2755{
2756 bool fAligned = false;
2757 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2758 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2759 {
2760 case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
2761 fAligned = true;
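/* fall thru */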
2762 case IEM_OP_PRF_REPZ: /* SSE unaligned */
2763 if (fAligned)
2764 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
2765 else
2766 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
2767 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2768 {
2769 /*
2770 * Register, register.
2771 */
2772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2773 IEM_MC_BEGIN(0, 0);
2774 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2775 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2776 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2777 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2778 IEM_MC_ADVANCE_RIP();
2779 IEM_MC_END();
2780 }
2781 else
2782 {
2783 /*
2784 * Register, memory.
2785 */
2786 IEM_MC_BEGIN(0, 2);
2787 IEM_MC_LOCAL(uint128_t, u128Tmp);
2788 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2789
2790 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2791 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2792 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2793 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2794 if (fAligned)
2795 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2796 else
2797 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2798 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2799
2800 IEM_MC_ADVANCE_RIP();
2801 IEM_MC_END();
2802 }
2803 return VINF_SUCCESS;
2804
2805 case 0: /* MMX */
2806 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
2807 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2808 {
2809 /*
2810 * Register, register.
2811 */
2812 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2813 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2815 IEM_MC_BEGIN(0, 1);
2816 IEM_MC_LOCAL(uint64_t, u64Tmp);
2817 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2818 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2819 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
2820 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2821 IEM_MC_ADVANCE_RIP();
2822 IEM_MC_END();
2823 }
2824 else
2825 {
2826 /*
2827 * Register, memory.
2828 */
2829 IEM_MC_BEGIN(0, 2);
2830 IEM_MC_LOCAL(uint64_t, u64Tmp);
2831 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2832
2833 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2835 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2836 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2837 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2838 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2839
2840 IEM_MC_ADVANCE_RIP();
2841 IEM_MC_END();
2842 }
2843 return VINF_SUCCESS;
2844
2845 default:
2846 return IEMOP_RAISE_INVALID_OPCODE();
2847 }
2848}
2849
2850
2851/** Opcode 0x0f 0x70. The immediate here is evil: it follows the ModRM operand bytes, so in the memory forms it can only be fetched after the effective address has been calculated. */
2852FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflw_Vdq_Wdq_Ib)
2853{
2854 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2855 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2856 {
2857 case IEM_OP_PRF_SIZE_OP: /* SSE */
2858 case IEM_OP_PRF_REPNZ: /* SSE */
2859 case IEM_OP_PRF_REPZ: /* SSE */
2860 {
2861 PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
2862 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2863 {
2864 case IEM_OP_PRF_SIZE_OP:
2865 IEMOP_MNEMONIC(pshufd_Vdq_Wdq, "pshufd Vdq,Wdq,Ib");
2866 pfnAImpl = iemAImpl_pshufd;
2867 break;
2868 case IEM_OP_PRF_REPNZ:
2869 IEMOP_MNEMONIC(pshuflw_Vdq_Wdq, "pshuflw Vdq,Wdq,Ib");
2870 pfnAImpl = iemAImpl_pshuflw;
2871 break;
2872 case IEM_OP_PRF_REPZ:
2873 IEMOP_MNEMONIC(pshufhw_Vdq_Wdq, "pshufhw Vdq,Wdq,Ib");
2874 pfnAImpl = iemAImpl_pshufhw;
2875 break;
2876 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2877 }
2878 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2879 {
2880 /*
2881 * Register, register.
2882 */
2883 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2885
2886 IEM_MC_BEGIN(3, 0);
2887 IEM_MC_ARG(uint128_t *, pDst, 0);
2888 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2889 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2890 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2891 IEM_MC_PREPARE_SSE_USAGE();
2892 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2893 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2894 IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
2895 IEM_MC_ADVANCE_RIP();
2896 IEM_MC_END();
2897 }
2898 else
2899 {
2900 /*
2901 * Register, memory.
2902 */
2903 IEM_MC_BEGIN(3, 2);
2904 IEM_MC_ARG(uint128_t *, pDst, 0);
2905 IEM_MC_LOCAL(uint128_t, uSrc);
2906 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2907 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2908
2909 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* the imm8 follows the ModRM operand bytes */
2910 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2911 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2913 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2914
2915 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2916 IEM_MC_PREPARE_SSE_USAGE();
2917 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2918 IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
2919
2920 IEM_MC_ADVANCE_RIP();
2921 IEM_MC_END();
2922 }
2923 return VINF_SUCCESS;
2924 }
2925
2926 case 0: /* MMX Extension */
2927 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
2928 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2929 {
2930 /*
2931 * Register, register.
2932 */
2933 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2935
2936 IEM_MC_BEGIN(3, 0);
2937 IEM_MC_ARG(uint64_t *, pDst, 0);
2938 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2939 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2940 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2941 IEM_MC_PREPARE_FPU_USAGE();
2942 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2943 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2944 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2945 IEM_MC_ADVANCE_RIP();
2946 IEM_MC_END();
2947 }
2948 else
2949 {
2950 /*
2951 * Register, memory.
2952 */
2953 IEM_MC_BEGIN(3, 2);
2954 IEM_MC_ARG(uint64_t *, pDst, 0);
2955 IEM_MC_LOCAL(uint64_t, uSrc);
2956 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2957 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2958
2959 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* the imm8 follows the ModRM operand bytes */
2960 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2961 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2963 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2964
2965 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2966 IEM_MC_PREPARE_FPU_USAGE();
2967 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2968 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2969
2970 IEM_MC_ADVANCE_RIP();
2971 IEM_MC_END();
2972 }
2973 return VINF_SUCCESS;
2974
2975 default:
2976 return IEMOP_RAISE_INVALID_OPCODE();
2977 }
2978}
2979
2980
2981/** Opcode 0x0f 0x71 11/2. */
2982FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
2983
2984/** Opcode 0x66 0x0f 0x71 11/2. */
2985FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);
2986
2987/** Opcode 0x0f 0x71 11/4. */
2988FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
2989
2990/** Opcode 0x66 0x0f 0x71 11/4. */
2991FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);
2992
2993/** Opcode 0x0f 0x71 11/6. */
2994FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
2995
2996/** Opcode 0x66 0x0f 0x71 11/6. */
2997FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
2998
2999
3000/** Opcode 0x0f 0x71. */
3001FNIEMOP_DEF(iemOp_Grp12)
3002{
3003 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3004 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3005 return IEMOP_RAISE_INVALID_OPCODE();
3006 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3007 {
3008 case 0: case 1: case 3: case 5: case 7:
3009 return IEMOP_RAISE_INVALID_OPCODE();
3010 case 2:
3011 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3012 {
3013 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
3014 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
3015 default: return IEMOP_RAISE_INVALID_OPCODE();
3016 }
3017 case 4:
3018 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3019 {
3020 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
3021 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
3022 default: return IEMOP_RAISE_INVALID_OPCODE();
3023 }
3024 case 6:
3025 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3026 {
3027 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
3028 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
3029 default: return IEMOP_RAISE_INVALID_OPCODE();
3030 }
3031 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3032 }
3033}
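
/*
 * Editor's sketch for the shift groups (Grp12/13/14): the ModRM reg field
 * selects the operation and rm the target register, so 0f 71 d0 ib is
 * psrlw mm0, imm8 (/2) and 66 0f 71 d0 ib the psrlw xmm0, imm8 form; memory
 * forms are invalid, as checked above.
 */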
3034
3035
3036/** Opcode 0x0f 0x72 11/2. */
3037FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3038
3039/** Opcode 0x66 0x0f 0x72 11/2. */
3040FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);
3041
3042/** Opcode 0x0f 0x72 11/4. */
3043FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3044
3045/** Opcode 0x66 0x0f 0x72 11/4. */
3046FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);
3047
3048/** Opcode 0x0f 0x72 11/6. */
3049FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3050
3051/** Opcode 0x66 0x0f 0x72 11/6. */
3052FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
3053
3054
3055/** Opcode 0x0f 0x72. */
3056FNIEMOP_DEF(iemOp_Grp13)
3057{
3058 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3059 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3060 return IEMOP_RAISE_INVALID_OPCODE();
3061 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3062 {
3063 case 0: case 1: case 3: case 5: case 7:
3064 return IEMOP_RAISE_INVALID_OPCODE();
3065 case 2:
3066 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3067 {
3068 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
3069 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
3070 default: return IEMOP_RAISE_INVALID_OPCODE();
3071 }
3072 case 4:
3073 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3074 {
3075 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
3076 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
3077 default: return IEMOP_RAISE_INVALID_OPCODE();
3078 }
3079 case 6:
3080 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3081 {
3082 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
3083 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
3084 default: return IEMOP_RAISE_INVALID_OPCODE();
3085 }
3086 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3087 }
3088}
3089
3090
3091/** Opcode 0x0f 0x73 11/2. */
3092FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3093
3094/** Opcode 0x66 0x0f 0x73 11/2. */
3095FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);
3096
3097/** Opcode 0x66 0x0f 0x73 11/3. */
3098FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT
3099
3100/** Opcode 0x0f 0x73 11/6. */
3101FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3102
3103/** Opcode 0x66 0x0f 0x73 11/6. */
3104FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);
3105
3106/** Opcode 0x66 0x0f 0x73 11/7. */
3107FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
3108
3109
3110/** Opcode 0x0f 0x73. */
3111FNIEMOP_DEF(iemOp_Grp14)
3112{
3113 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3114 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3115 return IEMOP_RAISE_INVALID_OPCODE();
3116 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3117 {
3118 case 0: case 1: case 4: case 5:
3119 return IEMOP_RAISE_INVALID_OPCODE();
3120 case 2:
3121 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3122 {
3123 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
3124 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
3125 default: return IEMOP_RAISE_INVALID_OPCODE();
3126 }
3127 case 3:
3128 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3129 {
3130 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
3131 default: return IEMOP_RAISE_INVALID_OPCODE();
3132 }
3133 case 6:
3134 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3135 {
3136 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
3137 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
3138 default: return IEMOP_RAISE_INVALID_OPCODE();
3139 }
3140 case 7:
3141 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3142 {
3143 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
3144 default: return IEMOP_RAISE_INVALID_OPCODE();
3145 }
3146 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3147 }
3148}
3149
3150
3151/**
3152 * Common worker for SSE2 and MMX instructions on the forms:
3153 * pxxx mm1, mm2/mem64
3154 * pxxx xmm1, xmm2/mem128
3155 *
3156 * Proper alignment of the 128-bit operand is enforced.
3157 * Exceptions type 4. SSE2 and MMX cpuid checks.
3158 */
3159FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3160{
3161 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3162 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3163 {
3164 case IEM_OP_PRF_SIZE_OP: /* SSE */
3165 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3166 {
3167 /*
3168 * Register, register.
3169 */
3170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3171 IEM_MC_BEGIN(2, 0);
3172 IEM_MC_ARG(uint128_t *, pDst, 0);
3173 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3174 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3175 IEM_MC_PREPARE_SSE_USAGE();
3176 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3177 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3178 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3179 IEM_MC_ADVANCE_RIP();
3180 IEM_MC_END();
3181 }
3182 else
3183 {
3184 /*
3185 * Register, memory.
3186 */
3187 IEM_MC_BEGIN(2, 2);
3188 IEM_MC_ARG(uint128_t *, pDst, 0);
3189 IEM_MC_LOCAL(uint128_t, uSrc);
3190 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3192
3193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3195 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3196 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3197
3198 IEM_MC_PREPARE_SSE_USAGE();
3199 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3200 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3201
3202 IEM_MC_ADVANCE_RIP();
3203 IEM_MC_END();
3204 }
3205 return VINF_SUCCESS;
3206
3207 case 0: /* MMX */
3208 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3209 {
3210 /*
3211 * Register, register.
3212 */
3213 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3214 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3216 IEM_MC_BEGIN(2, 0);
3217 IEM_MC_ARG(uint64_t *, pDst, 0);
3218 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3219 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3220 IEM_MC_PREPARE_FPU_USAGE();
3221 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3222 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3223 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3224 IEM_MC_ADVANCE_RIP();
3225 IEM_MC_END();
3226 }
3227 else
3228 {
3229 /*
3230 * Register, memory.
3231 */
3232 IEM_MC_BEGIN(2, 2);
3233 IEM_MC_ARG(uint64_t *, pDst, 0);
3234 IEM_MC_LOCAL(uint64_t, uSrc);
3235 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3236 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3237
3238 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3240 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3241 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3242
3243 IEM_MC_PREPARE_FPU_USAGE();
3244 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3245 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3246
3247 IEM_MC_ADVANCE_RIP();
3248 IEM_MC_END();
3249 }
3250 return VINF_SUCCESS;
3251
3252 default:
3253 return IEMOP_RAISE_INVALID_OPCODE();
3254 }
3255}
3256
3257
3258/** Opcode 0x0f 0x74. */
3259FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
3260{
3261 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3262 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3263}
3264
3265
3266/** Opcode 0x0f 0x75. */
3267FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
3268{
3269 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3270 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3271}
3272
3273
3274/** Opcode 0x0f 0x76. */
3275FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq__pcmpeqd_Vdq_Wdq)
3276{
3277 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3278 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3279}
3280
3281
3282/** Opcode 0x0f 0x77. */
3283FNIEMOP_STUB(iemOp_emms);
3284/** Opcode 0x0f 0x78. */
3285FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17);
3286/** Opcode 0x0f 0x79. */
3287FNIEMOP_UD_STUB(iemOp_vmwrite);
3288/** Opcode 0x0f 0x7c. */
3289FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);
3290/** Opcode 0x0f 0x7d. */
3291FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
3292
3293
3294/** Opcode 0x0f 0x7e. */
3295FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
3296{
3297 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3298 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3299 {
3300 case IEM_OP_PRF_SIZE_OP: /* SSE */
3301 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3302 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
3303 else
3304 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
3305 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3306 {
3307 /* greg, XMM */
3308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3309 IEM_MC_BEGIN(0, 1);
3310 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3311 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3312 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3313 {
3314 IEM_MC_LOCAL(uint64_t, u64Tmp);
3315 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3316 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3317 }
3318 else
3319 {
3320 IEM_MC_LOCAL(uint32_t, u32Tmp);
3321 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3322 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3323 }
3324 IEM_MC_ADVANCE_RIP();
3325 IEM_MC_END();
3326 }
3327 else
3328 {
3329 /* [mem], XMM */
3330 IEM_MC_BEGIN(0, 2);
3331 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3332 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3333 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3335 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3336 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3337 {
3338 IEM_MC_LOCAL(uint64_t, u64Tmp);
3339 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3340 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3341 }
3342 else
3343 {
3344 IEM_MC_LOCAL(uint32_t, u32Tmp);
3345 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3346 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3347 }
3348 IEM_MC_ADVANCE_RIP();
3349 IEM_MC_END();
3350 }
3351 return VINF_SUCCESS;
3352
3353 case 0: /* MMX */
3354 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3355 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3356 else
3357 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3358 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3359 {
3360 /* greg, MMX */
3361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3362 IEM_MC_BEGIN(0, 1);
3363 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3364 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3365 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3366 {
3367 IEM_MC_LOCAL(uint64_t, u64Tmp);
3368 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3369 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3370 }
3371 else
3372 {
3373 IEM_MC_LOCAL(uint32_t, u32Tmp);
3374 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3375 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3376 }
3377 IEM_MC_ADVANCE_RIP();
3378 IEM_MC_END();
3379 }
3380 else
3381 {
3382 /* [mem], MMX */
3383 IEM_MC_BEGIN(0, 2);
3384 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3385 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3388 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3389 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3390 {
3391 IEM_MC_LOCAL(uint64_t, u64Tmp);
3392 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3393 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3394 }
3395 else
3396 {
3397 IEM_MC_LOCAL(uint32_t, u32Tmp);
3398 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3399 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3400 }
3401 IEM_MC_ADVANCE_RIP();
3402 IEM_MC_END();
3403 }
3404 return VINF_SUCCESS;
3405
3406 default:
3407 return IEMOP_RAISE_INVALID_OPCODE();
3408 }
3409}
3410
3411
3412/** Opcode 0x0f 0x7f. */
3413FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
3414{
3415 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3416 bool fAligned = false;
3417 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3418 {
3419 case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
3420 fAligned = true;
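/* fall thru */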
3421 case IEM_OP_PRF_REPZ: /* SSE unaligned */
3422 if (fAligned)
3423 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wdq,Vdq");
3424 else
3425 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wdq,Vdq");
3426 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3427 {
3428 /*
3429 * Register, register.
3430 */
3431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3432 IEM_MC_BEGIN(0, 0);
3433 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3434 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3435 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3436 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3437 IEM_MC_ADVANCE_RIP();
3438 IEM_MC_END();
3439 }
3440 else
3441 {
3442 /*
3443 * Register, memory.
3444 */
3445 IEM_MC_BEGIN(0, 2);
3446 IEM_MC_LOCAL(uint128_t, u128Tmp);
3447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3448
3449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3451 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3452 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3453
3454 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3455 if (fAligned)
3456 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3457 else
3458 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3459
3460 IEM_MC_ADVANCE_RIP();
3461 IEM_MC_END();
3462 }
3463 return VINF_SUCCESS;
3464
3465 case 0: /* MMX */
3466 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3467
3468 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3469 {
3470 /*
3471 * Register, register.
3472 */
3473 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3474 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3476 IEM_MC_BEGIN(0, 1);
3477 IEM_MC_LOCAL(uint64_t, u64Tmp);
3478 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3479 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3480 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3481 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3482 IEM_MC_ADVANCE_RIP();
3483 IEM_MC_END();
3484 }
3485 else
3486 {
3487 /*
3488 * Register, memory.
3489 */
3490 IEM_MC_BEGIN(0, 2);
3491 IEM_MC_LOCAL(uint64_t, u64Tmp);
3492 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3493
3494 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3496 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3497 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3498
3499 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3500 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3501
3502 IEM_MC_ADVANCE_RIP();
3503 IEM_MC_END();
3504 }
3505 return VINF_SUCCESS;
3506
3507 default:
3508 return IEMOP_RAISE_INVALID_OPCODE();
3509 }
3510}
3511
3512
3513
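/*
 * Opcodes 0x0f 0x80 thru 0x8f are the long forms of the conditional jumps
 * (Jcc Jv).  They all follow the same pattern: fetch a signed 16-bit or
 * 32-bit displacement (there is no 64-bit displacement; in 64-bit mode the
 * 32-bit form is used while the operand size defaults to 64-bit), test the
 * relevant EFLAGS condition, and either branch or fall through to the next
 * instruction.
 */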
3514/** Opcode 0x0f 0x80. */
3515FNIEMOP_DEF(iemOp_jo_Jv)
3516{
3517 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
3518 IEMOP_HLP_MIN_386();
3519 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3520 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3521 {
3522 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3524
3525 IEM_MC_BEGIN(0, 0);
3526 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3527 IEM_MC_REL_JMP_S16(i16Imm);
3528 } IEM_MC_ELSE() {
3529 IEM_MC_ADVANCE_RIP();
3530 } IEM_MC_ENDIF();
3531 IEM_MC_END();
3532 }
3533 else
3534 {
3535 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3537
3538 IEM_MC_BEGIN(0, 0);
3539 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3540 IEM_MC_REL_JMP_S32(i32Imm);
3541 } IEM_MC_ELSE() {
3542 IEM_MC_ADVANCE_RIP();
3543 } IEM_MC_ENDIF();
3544 IEM_MC_END();
3545 }
3546 return VINF_SUCCESS;
3547}
3548
3549
3550/** Opcode 0x0f 0x81. */
3551FNIEMOP_DEF(iemOp_jno_Jv)
3552{
3553 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
3554 IEMOP_HLP_MIN_386();
3555 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3556 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3557 {
3558 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3560
3561 IEM_MC_BEGIN(0, 0);
3562 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3563 IEM_MC_ADVANCE_RIP();
3564 } IEM_MC_ELSE() {
3565 IEM_MC_REL_JMP_S16(i16Imm);
3566 } IEM_MC_ENDIF();
3567 IEM_MC_END();
3568 }
3569 else
3570 {
3571 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3573
3574 IEM_MC_BEGIN(0, 0);
3575 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3576 IEM_MC_ADVANCE_RIP();
3577 } IEM_MC_ELSE() {
3578 IEM_MC_REL_JMP_S32(i32Imm);
3579 } IEM_MC_ENDIF();
3580 IEM_MC_END();
3581 }
3582 return VINF_SUCCESS;
3583}
3584
3585
3586/** Opcode 0x0f 0x82. */
3587FNIEMOP_DEF(iemOp_jc_Jv)
3588{
3589 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
3590 IEMOP_HLP_MIN_386();
3591 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3592 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3593 {
3594 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3596
3597 IEM_MC_BEGIN(0, 0);
3598 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3599 IEM_MC_REL_JMP_S16(i16Imm);
3600 } IEM_MC_ELSE() {
3601 IEM_MC_ADVANCE_RIP();
3602 } IEM_MC_ENDIF();
3603 IEM_MC_END();
3604 }
3605 else
3606 {
3607 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3609
3610 IEM_MC_BEGIN(0, 0);
3611 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3612 IEM_MC_REL_JMP_S32(i32Imm);
3613 } IEM_MC_ELSE() {
3614 IEM_MC_ADVANCE_RIP();
3615 } IEM_MC_ENDIF();
3616 IEM_MC_END();
3617 }
3618 return VINF_SUCCESS;
3619}
3620
3621
3622/** Opcode 0x0f 0x83. */
3623FNIEMOP_DEF(iemOp_jnc_Jv)
3624{
3625 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
3626 IEMOP_HLP_MIN_386();
3627 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3628 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3629 {
3630 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3632
3633 IEM_MC_BEGIN(0, 0);
3634 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3635 IEM_MC_ADVANCE_RIP();
3636 } IEM_MC_ELSE() {
3637 IEM_MC_REL_JMP_S16(i16Imm);
3638 } IEM_MC_ENDIF();
3639 IEM_MC_END();
3640 }
3641 else
3642 {
3643 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3645
3646 IEM_MC_BEGIN(0, 0);
3647 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3648 IEM_MC_ADVANCE_RIP();
3649 } IEM_MC_ELSE() {
3650 IEM_MC_REL_JMP_S32(i32Imm);
3651 } IEM_MC_ENDIF();
3652 IEM_MC_END();
3653 }
3654 return VINF_SUCCESS;
3655}
3656
3657
3658/** Opcode 0x0f 0x84. */
3659FNIEMOP_DEF(iemOp_je_Jv)
3660{
3661 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
3662 IEMOP_HLP_MIN_386();
3663 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3664 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3665 {
3666 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3668
3669 IEM_MC_BEGIN(0, 0);
3670 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3671 IEM_MC_REL_JMP_S16(i16Imm);
3672 } IEM_MC_ELSE() {
3673 IEM_MC_ADVANCE_RIP();
3674 } IEM_MC_ENDIF();
3675 IEM_MC_END();
3676 }
3677 else
3678 {
3679 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3681
3682 IEM_MC_BEGIN(0, 0);
3683 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3684 IEM_MC_REL_JMP_S32(i32Imm);
3685 } IEM_MC_ELSE() {
3686 IEM_MC_ADVANCE_RIP();
3687 } IEM_MC_ENDIF();
3688 IEM_MC_END();
3689 }
3690 return VINF_SUCCESS;
3691}
3692
3693
3694/** Opcode 0x0f 0x85. */
3695FNIEMOP_DEF(iemOp_jne_Jv)
3696{
3697 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
3698 IEMOP_HLP_MIN_386();
3699 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3700 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3701 {
3702 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3704
3705 IEM_MC_BEGIN(0, 0);
3706 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3707 IEM_MC_ADVANCE_RIP();
3708 } IEM_MC_ELSE() {
3709 IEM_MC_REL_JMP_S16(i16Imm);
3710 } IEM_MC_ENDIF();
3711 IEM_MC_END();
3712 }
3713 else
3714 {
3715 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3717
3718 IEM_MC_BEGIN(0, 0);
3719 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3720 IEM_MC_ADVANCE_RIP();
3721 } IEM_MC_ELSE() {
3722 IEM_MC_REL_JMP_S32(i32Imm);
3723 } IEM_MC_ENDIF();
3724 IEM_MC_END();
3725 }
3726 return VINF_SUCCESS;
3727}
3728
3729
3730/** Opcode 0x0f 0x86. */
3731FNIEMOP_DEF(iemOp_jbe_Jv)
3732{
3733 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
3734 IEMOP_HLP_MIN_386();
3735 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3736 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3737 {
3738 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3740
3741 IEM_MC_BEGIN(0, 0);
3742 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3743 IEM_MC_REL_JMP_S16(i16Imm);
3744 } IEM_MC_ELSE() {
3745 IEM_MC_ADVANCE_RIP();
3746 } IEM_MC_ENDIF();
3747 IEM_MC_END();
3748 }
3749 else
3750 {
3751 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3753
3754 IEM_MC_BEGIN(0, 0);
3755 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3756 IEM_MC_REL_JMP_S32(i32Imm);
3757 } IEM_MC_ELSE() {
3758 IEM_MC_ADVANCE_RIP();
3759 } IEM_MC_ENDIF();
3760 IEM_MC_END();
3761 }
3762 return VINF_SUCCESS;
3763}
3764
3765
3766/** Opcode 0x0f 0x87. */
3767FNIEMOP_DEF(iemOp_jnbe_Jv)
3768{
3769 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
3770 IEMOP_HLP_MIN_386();
3771 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3772 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3773 {
3774 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3776
3777 IEM_MC_BEGIN(0, 0);
3778 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3779 IEM_MC_ADVANCE_RIP();
3780 } IEM_MC_ELSE() {
3781 IEM_MC_REL_JMP_S16(i16Imm);
3782 } IEM_MC_ENDIF();
3783 IEM_MC_END();
3784 }
3785 else
3786 {
3787 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3789
3790 IEM_MC_BEGIN(0, 0);
3791 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3792 IEM_MC_ADVANCE_RIP();
3793 } IEM_MC_ELSE() {
3794 IEM_MC_REL_JMP_S32(i32Imm);
3795 } IEM_MC_ENDIF();
3796 IEM_MC_END();
3797 }
3798 return VINF_SUCCESS;
3799}
3800
3801
3802/** Opcode 0x0f 0x88. */
3803FNIEMOP_DEF(iemOp_js_Jv)
3804{
3805 IEMOP_MNEMONIC(js_Jv, "js Jv");
3806 IEMOP_HLP_MIN_386();
3807 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3808 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3809 {
3810 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3812
3813 IEM_MC_BEGIN(0, 0);
3814 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3815 IEM_MC_REL_JMP_S16(i16Imm);
3816 } IEM_MC_ELSE() {
3817 IEM_MC_ADVANCE_RIP();
3818 } IEM_MC_ENDIF();
3819 IEM_MC_END();
3820 }
3821 else
3822 {
3823 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3825
3826 IEM_MC_BEGIN(0, 0);
3827 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3828 IEM_MC_REL_JMP_S32(i32Imm);
3829 } IEM_MC_ELSE() {
3830 IEM_MC_ADVANCE_RIP();
3831 } IEM_MC_ENDIF();
3832 IEM_MC_END();
3833 }
3834 return VINF_SUCCESS;
3835}
3836
3837
3838/** Opcode 0x0f 0x89. */
3839FNIEMOP_DEF(iemOp_jns_Jv)
3840{
3841 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
3842 IEMOP_HLP_MIN_386();
3843 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3844 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3845 {
3846 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3847 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3848
3849 IEM_MC_BEGIN(0, 0);
3850 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3851 IEM_MC_ADVANCE_RIP();
3852 } IEM_MC_ELSE() {
3853 IEM_MC_REL_JMP_S16(i16Imm);
3854 } IEM_MC_ENDIF();
3855 IEM_MC_END();
3856 }
3857 else
3858 {
3859 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3861
3862 IEM_MC_BEGIN(0, 0);
3863 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3864 IEM_MC_ADVANCE_RIP();
3865 } IEM_MC_ELSE() {
3866 IEM_MC_REL_JMP_S32(i32Imm);
3867 } IEM_MC_ENDIF();
3868 IEM_MC_END();
3869 }
3870 return VINF_SUCCESS;
3871}
3872
3873
3874/** Opcode 0x0f 0x8a. */
3875FNIEMOP_DEF(iemOp_jp_Jv)
3876{
3877 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
3878 IEMOP_HLP_MIN_386();
3879 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3880 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3881 {
3882 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3884
3885 IEM_MC_BEGIN(0, 0);
3886 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3887 IEM_MC_REL_JMP_S16(i16Imm);
3888 } IEM_MC_ELSE() {
3889 IEM_MC_ADVANCE_RIP();
3890 } IEM_MC_ENDIF();
3891 IEM_MC_END();
3892 }
3893 else
3894 {
3895 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3897
3898 IEM_MC_BEGIN(0, 0);
3899 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3900 IEM_MC_REL_JMP_S32(i32Imm);
3901 } IEM_MC_ELSE() {
3902 IEM_MC_ADVANCE_RIP();
3903 } IEM_MC_ENDIF();
3904 IEM_MC_END();
3905 }
3906 return VINF_SUCCESS;
3907}
3908
3909
3910/** Opcode 0x0f 0x8b. */
3911FNIEMOP_DEF(iemOp_jnp_Jv)
3912{
3913 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
3914 IEMOP_HLP_MIN_386();
3915 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3916 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3917 {
3918 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3920
3921 IEM_MC_BEGIN(0, 0);
3922 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3923 IEM_MC_ADVANCE_RIP();
3924 } IEM_MC_ELSE() {
3925 IEM_MC_REL_JMP_S16(i16Imm);
3926 } IEM_MC_ENDIF();
3927 IEM_MC_END();
3928 }
3929 else
3930 {
3931 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3933
3934 IEM_MC_BEGIN(0, 0);
3935 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3936 IEM_MC_ADVANCE_RIP();
3937 } IEM_MC_ELSE() {
3938 IEM_MC_REL_JMP_S32(i32Imm);
3939 } IEM_MC_ENDIF();
3940 IEM_MC_END();
3941 }
3942 return VINF_SUCCESS;
3943}
3944
3945
3946/** Opcode 0x0f 0x8c. */
3947FNIEMOP_DEF(iemOp_jl_Jv)
3948{
3949 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
3950 IEMOP_HLP_MIN_386();
3951 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3952 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3953 {
3954 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3956
3957 IEM_MC_BEGIN(0, 0);
3958 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3959 IEM_MC_REL_JMP_S16(i16Imm);
3960 } IEM_MC_ELSE() {
3961 IEM_MC_ADVANCE_RIP();
3962 } IEM_MC_ENDIF();
3963 IEM_MC_END();
3964 }
3965 else
3966 {
3967 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3969
3970 IEM_MC_BEGIN(0, 0);
3971 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3972 IEM_MC_REL_JMP_S32(i32Imm);
3973 } IEM_MC_ELSE() {
3974 IEM_MC_ADVANCE_RIP();
3975 } IEM_MC_ENDIF();
3976 IEM_MC_END();
3977 }
3978 return VINF_SUCCESS;
3979}
3980
3981
3982/** Opcode 0x0f 0x8d. */
3983FNIEMOP_DEF(iemOp_jnl_Jv)
3984{
3985 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
3986 IEMOP_HLP_MIN_386();
3987 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3988 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3989 {
3990 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3992
3993 IEM_MC_BEGIN(0, 0);
3994 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3995 IEM_MC_ADVANCE_RIP();
3996 } IEM_MC_ELSE() {
3997 IEM_MC_REL_JMP_S16(i16Imm);
3998 } IEM_MC_ENDIF();
3999 IEM_MC_END();
4000 }
4001 else
4002 {
4003 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4005
4006 IEM_MC_BEGIN(0, 0);
4007 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4008 IEM_MC_ADVANCE_RIP();
4009 } IEM_MC_ELSE() {
4010 IEM_MC_REL_JMP_S32(i32Imm);
4011 } IEM_MC_ENDIF();
4012 IEM_MC_END();
4013 }
4014 return VINF_SUCCESS;
4015}
4016
4017
4018/** Opcode 0x0f 0x8e. */
4019FNIEMOP_DEF(iemOp_jle_Jv)
4020{
4021 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4022 IEMOP_HLP_MIN_386();
4023 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4024 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4025 {
4026 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4028
4029 IEM_MC_BEGIN(0, 0);
4030 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4031 IEM_MC_REL_JMP_S16(i16Imm);
4032 } IEM_MC_ELSE() {
4033 IEM_MC_ADVANCE_RIP();
4034 } IEM_MC_ENDIF();
4035 IEM_MC_END();
4036 }
4037 else
4038 {
4039 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4041
4042 IEM_MC_BEGIN(0, 0);
4043 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4044 IEM_MC_REL_JMP_S32(i32Imm);
4045 } IEM_MC_ELSE() {
4046 IEM_MC_ADVANCE_RIP();
4047 } IEM_MC_ENDIF();
4048 IEM_MC_END();
4049 }
4050 return VINF_SUCCESS;
4051}
4052
4053
4054/** Opcode 0x0f 0x8f. */
4055FNIEMOP_DEF(iemOp_jnle_Jv)
4056{
4057 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4058 IEMOP_HLP_MIN_386();
4059 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4060 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4061 {
4062 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4064
4065 IEM_MC_BEGIN(0, 0);
4066 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4067 IEM_MC_ADVANCE_RIP();
4068 } IEM_MC_ELSE() {
4069 IEM_MC_REL_JMP_S16(i16Imm);
4070 } IEM_MC_ENDIF();
4071 IEM_MC_END();
4072 }
4073 else
4074 {
4075 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4077
4078 IEM_MC_BEGIN(0, 0);
4079 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4080 IEM_MC_ADVANCE_RIP();
4081 } IEM_MC_ELSE() {
4082 IEM_MC_REL_JMP_S32(i32Imm);
4083 } IEM_MC_ENDIF();
4084 IEM_MC_END();
4085 }
4086 return VINF_SUCCESS;
4087}
4088
4089
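/*
 * Opcodes 0x0f 0x90 thru 0x9f are the SETcc Eb instructions.  Each stores 1
 * in the byte-sized register or memory destination when its EFLAGS condition
 * holds and 0 otherwise.  The ModRM reg field is ignored by this
 * implementation; see the encoding-test todos below.
 */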
4090/** Opcode 0x0f 0x90. */
4091FNIEMOP_DEF(iemOp_seto_Eb)
4092{
4093 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4094 IEMOP_HLP_MIN_386();
4095 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4096
4097 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4098 * any way. AMD says it's "unused", whatever that means. We're
4099 * ignoring for now. */
4100 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4101 {
4102 /* register target */
4103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4104 IEM_MC_BEGIN(0, 0);
4105 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4106 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4107 } IEM_MC_ELSE() {
4108 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4109 } IEM_MC_ENDIF();
4110 IEM_MC_ADVANCE_RIP();
4111 IEM_MC_END();
4112 }
4113 else
4114 {
4115 /* memory target */
4116 IEM_MC_BEGIN(0, 1);
4117 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4118 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4120 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4121 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4122 } IEM_MC_ELSE() {
4123 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4124 } IEM_MC_ENDIF();
4125 IEM_MC_ADVANCE_RIP();
4126 IEM_MC_END();
4127 }
4128 return VINF_SUCCESS;
4129}
4130
4131
4132/** Opcode 0x0f 0x91. */
4133FNIEMOP_DEF(iemOp_setno_Eb)
4134{
4135 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4136 IEMOP_HLP_MIN_386();
4137 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4138
4139 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4140 * any way. AMD says it's "unused", whatever that means. We're
4141 * ignoring for now. */
4142 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4143 {
4144 /* register target */
4145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4146 IEM_MC_BEGIN(0, 0);
4147 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4148 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4149 } IEM_MC_ELSE() {
4150 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4151 } IEM_MC_ENDIF();
4152 IEM_MC_ADVANCE_RIP();
4153 IEM_MC_END();
4154 }
4155 else
4156 {
4157 /* memory target */
4158 IEM_MC_BEGIN(0, 1);
4159 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4162 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4163 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4164 } IEM_MC_ELSE() {
4165 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4166 } IEM_MC_ENDIF();
4167 IEM_MC_ADVANCE_RIP();
4168 IEM_MC_END();
4169 }
4170 return VINF_SUCCESS;
4171}
4172
4173
4174/** Opcode 0x0f 0x92. */
4175FNIEMOP_DEF(iemOp_setc_Eb)
4176{
4177 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4178 IEMOP_HLP_MIN_386();
4179 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4180
4181 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4182 * any way. AMD says it's "unused", whatever that means. We're
4183 * ignoring for now. */
4184 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4185 {
4186 /* register target */
4187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4188 IEM_MC_BEGIN(0, 0);
4189 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4190 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4191 } IEM_MC_ELSE() {
4192 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4193 } IEM_MC_ENDIF();
4194 IEM_MC_ADVANCE_RIP();
4195 IEM_MC_END();
4196 }
4197 else
4198 {
4199 /* memory target */
4200 IEM_MC_BEGIN(0, 1);
4201 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4204 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4205 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4206 } IEM_MC_ELSE() {
4207 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4208 } IEM_MC_ENDIF();
4209 IEM_MC_ADVANCE_RIP();
4210 IEM_MC_END();
4211 }
4212 return VINF_SUCCESS;
4213}
4214
4215
4216/** Opcode 0x0f 0x93. */
4217FNIEMOP_DEF(iemOp_setnc_Eb)
4218{
4219 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4220 IEMOP_HLP_MIN_386();
4221 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4222
4223 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4224 * any way. AMD says it's "unused", whatever that means. We're
4225 * ignoring for now. */
4226 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4227 {
4228 /* register target */
4229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4230 IEM_MC_BEGIN(0, 0);
4231 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4232 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4233 } IEM_MC_ELSE() {
4234 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4235 } IEM_MC_ENDIF();
4236 IEM_MC_ADVANCE_RIP();
4237 IEM_MC_END();
4238 }
4239 else
4240 {
4241 /* memory target */
4242 IEM_MC_BEGIN(0, 1);
4243 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4244 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4246 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4247 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4248 } IEM_MC_ELSE() {
4249 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4250 } IEM_MC_ENDIF();
4251 IEM_MC_ADVANCE_RIP();
4252 IEM_MC_END();
4253 }
4254 return VINF_SUCCESS;
4255}
4256
4257
4258/** Opcode 0x0f 0x94. */
4259FNIEMOP_DEF(iemOp_sete_Eb)
4260{
4261 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4262 IEMOP_HLP_MIN_386();
4263 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4264
4265 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4266 * any way. AMD says it's "unused", whatever that means. We're
4267 * ignoring for now. */
4268 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4269 {
4270 /* register target */
4271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4272 IEM_MC_BEGIN(0, 0);
4273 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4274 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4275 } IEM_MC_ELSE() {
4276 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4277 } IEM_MC_ENDIF();
4278 IEM_MC_ADVANCE_RIP();
4279 IEM_MC_END();
4280 }
4281 else
4282 {
4283 /* memory target */
4284 IEM_MC_BEGIN(0, 1);
4285 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4286 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4288 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4289 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4290 } IEM_MC_ELSE() {
4291 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4292 } IEM_MC_ENDIF();
4293 IEM_MC_ADVANCE_RIP();
4294 IEM_MC_END();
4295 }
4296 return VINF_SUCCESS;
4297}
4298
4299
4300/** Opcode 0x0f 0x95. */
4301FNIEMOP_DEF(iemOp_setne_Eb)
4302{
4303 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4304 IEMOP_HLP_MIN_386();
4305 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4306
4307 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4308 * any way. AMD says it's "unused", whatever that means. We're
4309 * ignoring for now. */
4310 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4311 {
4312 /* register target */
4313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4314 IEM_MC_BEGIN(0, 0);
4315 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4316 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4317 } IEM_MC_ELSE() {
4318 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4319 } IEM_MC_ENDIF();
4320 IEM_MC_ADVANCE_RIP();
4321 IEM_MC_END();
4322 }
4323 else
4324 {
4325 /* memory target */
4326 IEM_MC_BEGIN(0, 1);
4327 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4330 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4331 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4332 } IEM_MC_ELSE() {
4333 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4334 } IEM_MC_ENDIF();
4335 IEM_MC_ADVANCE_RIP();
4336 IEM_MC_END();
4337 }
4338 return VINF_SUCCESS;
4339}
4340
4341
4342/** Opcode 0x0f 0x96. */
4343FNIEMOP_DEF(iemOp_setbe_Eb)
4344{
4345 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4346 IEMOP_HLP_MIN_386();
4347 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4348
4349 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4350 * any way. AMD says it's "unused", whatever that means. We're
4351 * ignoring for now. */
4352 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4353 {
4354 /* register target */
4355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4356 IEM_MC_BEGIN(0, 0);
4357 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4358 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4359 } IEM_MC_ELSE() {
4360 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4361 } IEM_MC_ENDIF();
4362 IEM_MC_ADVANCE_RIP();
4363 IEM_MC_END();
4364 }
4365 else
4366 {
4367 /* memory target */
4368 IEM_MC_BEGIN(0, 1);
4369 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4370 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4372 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4373 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4374 } IEM_MC_ELSE() {
4375 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4376 } IEM_MC_ENDIF();
4377 IEM_MC_ADVANCE_RIP();
4378 IEM_MC_END();
4379 }
4380 return VINF_SUCCESS;
4381}
4382
4383
4384/** Opcode 0x0f 0x97. */
4385FNIEMOP_DEF(iemOp_setnbe_Eb)
4386{
4387 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4388 IEMOP_HLP_MIN_386();
4389 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4390
4391 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4392 * any way. AMD says it's "unused", whatever that means. We're
4393 * ignoring for now. */
4394 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4395 {
4396 /* register target */
4397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4398 IEM_MC_BEGIN(0, 0);
4399 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4400 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4401 } IEM_MC_ELSE() {
4402 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4403 } IEM_MC_ENDIF();
4404 IEM_MC_ADVANCE_RIP();
4405 IEM_MC_END();
4406 }
4407 else
4408 {
4409 /* memory target */
4410 IEM_MC_BEGIN(0, 1);
4411 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4412 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4414 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4415 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4416 } IEM_MC_ELSE() {
4417 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4418 } IEM_MC_ENDIF();
4419 IEM_MC_ADVANCE_RIP();
4420 IEM_MC_END();
4421 }
4422 return VINF_SUCCESS;
4423}
4424
4425
4426/** Opcode 0x0f 0x98. */
4427FNIEMOP_DEF(iemOp_sets_Eb)
4428{
4429 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
4430 IEMOP_HLP_MIN_386();
4431 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4432
4433 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4434 * any way. AMD says it's "unused", whatever that means. We're
4435 * ignoring for now. */
4436 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4437 {
4438 /* register target */
4439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4440 IEM_MC_BEGIN(0, 0);
4441 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4442 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4443 } IEM_MC_ELSE() {
4444 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4445 } IEM_MC_ENDIF();
4446 IEM_MC_ADVANCE_RIP();
4447 IEM_MC_END();
4448 }
4449 else
4450 {
4451 /* memory target */
4452 IEM_MC_BEGIN(0, 1);
4453 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4454 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4456 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4457 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4458 } IEM_MC_ELSE() {
4459 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4460 } IEM_MC_ENDIF();
4461 IEM_MC_ADVANCE_RIP();
4462 IEM_MC_END();
4463 }
4464 return VINF_SUCCESS;
4465}
4466
4467
4468/** Opcode 0x0f 0x99. */
4469FNIEMOP_DEF(iemOp_setns_Eb)
4470{
4471 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
4472 IEMOP_HLP_MIN_386();
4473 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4474
4475 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4476 * any way. AMD says it's "unused", whatever that means. We're
4477 * ignoring for now. */
4478 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4479 {
4480 /* register target */
4481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4482 IEM_MC_BEGIN(0, 0);
4483 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4484 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4485 } IEM_MC_ELSE() {
4486 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4487 } IEM_MC_ENDIF();
4488 IEM_MC_ADVANCE_RIP();
4489 IEM_MC_END();
4490 }
4491 else
4492 {
4493 /* memory target */
4494 IEM_MC_BEGIN(0, 1);
4495 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4496 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4498 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4499 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4500 } IEM_MC_ELSE() {
4501 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4502 } IEM_MC_ENDIF();
4503 IEM_MC_ADVANCE_RIP();
4504 IEM_MC_END();
4505 }
4506 return VINF_SUCCESS;
4507}
4508
4509
4510/** Opcode 0x0f 0x9a. */
4511FNIEMOP_DEF(iemOp_setp_Eb)
4512{
4513 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
4514 IEMOP_HLP_MIN_386();
4515 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4516
4517 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4518 * any way. AMD says it's "unused", whatever that means. We're
4519 * ignoring for now. */
4520 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4521 {
4522 /* register target */
4523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4524 IEM_MC_BEGIN(0, 0);
4525 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4526 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4527 } IEM_MC_ELSE() {
4528 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4529 } IEM_MC_ENDIF();
4530 IEM_MC_ADVANCE_RIP();
4531 IEM_MC_END();
4532 }
4533 else
4534 {
4535 /* memory target */
4536 IEM_MC_BEGIN(0, 1);
4537 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4538 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4540 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4541 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4542 } IEM_MC_ELSE() {
4543 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4544 } IEM_MC_ENDIF();
4545 IEM_MC_ADVANCE_RIP();
4546 IEM_MC_END();
4547 }
4548 return VINF_SUCCESS;
4549}
4550
4551
4552/** Opcode 0x0f 0x9b. */
4553FNIEMOP_DEF(iemOp_setnp_Eb)
4554{
4555 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
4556 IEMOP_HLP_MIN_386();
4557 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4558
4559 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4560 * any way. AMD says it's "unused", whatever that means. We're
4561 * ignoring for now. */
4562 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4563 {
4564 /* register target */
4565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4566 IEM_MC_BEGIN(0, 0);
4567 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4568 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4569 } IEM_MC_ELSE() {
4570 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4571 } IEM_MC_ENDIF();
4572 IEM_MC_ADVANCE_RIP();
4573 IEM_MC_END();
4574 }
4575 else
4576 {
4577 /* memory target */
4578 IEM_MC_BEGIN(0, 1);
4579 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4580 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4582 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4583 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4584 } IEM_MC_ELSE() {
4585 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4586 } IEM_MC_ENDIF();
4587 IEM_MC_ADVANCE_RIP();
4588 IEM_MC_END();
4589 }
4590 return VINF_SUCCESS;
4591}
4592
4593
4594/** Opcode 0x0f 0x9c. */
4595FNIEMOP_DEF(iemOp_setl_Eb)
4596{
4597 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
4598 IEMOP_HLP_MIN_386();
4599 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4600
4601 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4602 * any way. AMD says it's "unused", whatever that means. We're
4603 * ignoring for now. */
4604 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4605 {
4606 /* register target */
4607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4608 IEM_MC_BEGIN(0, 0);
4609 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4610 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4611 } IEM_MC_ELSE() {
4612 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4613 } IEM_MC_ENDIF();
4614 IEM_MC_ADVANCE_RIP();
4615 IEM_MC_END();
4616 }
4617 else
4618 {
4619 /* memory target */
4620 IEM_MC_BEGIN(0, 1);
4621 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4622 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4624 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4625 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4626 } IEM_MC_ELSE() {
4627 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4628 } IEM_MC_ENDIF();
4629 IEM_MC_ADVANCE_RIP();
4630 IEM_MC_END();
4631 }
4632 return VINF_SUCCESS;
4633}
4634
4635
4636/** Opcode 0x0f 0x9d. */
4637FNIEMOP_DEF(iemOp_setnl_Eb)
4638{
4639 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
4640 IEMOP_HLP_MIN_386();
4641 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4642
4643 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4644 * any way. AMD says it's "unused", whatever that means. We're
4645 * ignoring for now. */
4646 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4647 {
4648 /* register target */
4649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4650 IEM_MC_BEGIN(0, 0);
4651 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4652 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4653 } IEM_MC_ELSE() {
4654 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4655 } IEM_MC_ENDIF();
4656 IEM_MC_ADVANCE_RIP();
4657 IEM_MC_END();
4658 }
4659 else
4660 {
4661 /* memory target */
4662 IEM_MC_BEGIN(0, 1);
4663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4664 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4666 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4667 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4668 } IEM_MC_ELSE() {
4669 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4670 } IEM_MC_ENDIF();
4671 IEM_MC_ADVANCE_RIP();
4672 IEM_MC_END();
4673 }
4674 return VINF_SUCCESS;
4675}
4676
4677
4678/** Opcode 0x0f 0x9e. */
4679FNIEMOP_DEF(iemOp_setle_Eb)
4680{
4681 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
4682 IEMOP_HLP_MIN_386();
4683 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4684
4685 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4686 * any way. AMD says it's "unused", whatever that means. We're
4687 * ignoring for now. */
4688 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4689 {
4690 /* register target */
4691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4692 IEM_MC_BEGIN(0, 0);
4693 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4694 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4695 } IEM_MC_ELSE() {
4696 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4697 } IEM_MC_ENDIF();
4698 IEM_MC_ADVANCE_RIP();
4699 IEM_MC_END();
4700 }
4701 else
4702 {
4703 /* memory target */
4704 IEM_MC_BEGIN(0, 1);
4705 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4706 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4708 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4709 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4710 } IEM_MC_ELSE() {
4711 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4712 } IEM_MC_ENDIF();
4713 IEM_MC_ADVANCE_RIP();
4714 IEM_MC_END();
4715 }
4716 return VINF_SUCCESS;
4717}
4718
4719
4720/** Opcode 0x0f 0x9f. */
4721FNIEMOP_DEF(iemOp_setnle_Eb)
4722{
4723 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
4724 IEMOP_HLP_MIN_386();
4725 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4726
4727 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4728 * any way. AMD says it's "unused", whatever that means. We're
4729 * ignoring for now. */
4730 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4731 {
4732 /* register target */
4733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4734 IEM_MC_BEGIN(0, 0);
4735 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4736 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4737 } IEM_MC_ELSE() {
4738 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4739 } IEM_MC_ENDIF();
4740 IEM_MC_ADVANCE_RIP();
4741 IEM_MC_END();
4742 }
4743 else
4744 {
4745 /* memory target */
4746 IEM_MC_BEGIN(0, 1);
4747 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4750 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4751 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4752 } IEM_MC_ELSE() {
4753 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4754 } IEM_MC_ENDIF();
4755 IEM_MC_ADVANCE_RIP();
4756 IEM_MC_END();
4757 }
4758 return VINF_SUCCESS;
4759}
4760
4761
4762/**
4763 * Common 'push segment-register' helper.
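 *
 * Pushing ES, CS, SS or DS is invalid in 64-bit mode (hence the iReg check
 * below); FS and GS remain pushable there.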
4764 */
4765FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
4766{
4767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4768 if (iReg < X86_SREG_FS)
4769 IEMOP_HLP_NO_64BIT();
4770 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4771
4772 switch (pVCpu->iem.s.enmEffOpSize)
4773 {
4774 case IEMMODE_16BIT:
4775 IEM_MC_BEGIN(0, 1);
4776 IEM_MC_LOCAL(uint16_t, u16Value);
4777 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
4778 IEM_MC_PUSH_U16(u16Value);
4779 IEM_MC_ADVANCE_RIP();
4780 IEM_MC_END();
4781 break;
4782
4783 case IEMMODE_32BIT:
4784 IEM_MC_BEGIN(0, 1);
4785 IEM_MC_LOCAL(uint32_t, u32Value);
4786 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
4787 IEM_MC_PUSH_U32_SREG(u32Value);
4788 IEM_MC_ADVANCE_RIP();
4789 IEM_MC_END();
4790 break;
4791
4792 case IEMMODE_64BIT:
4793 IEM_MC_BEGIN(0, 1);
4794 IEM_MC_LOCAL(uint64_t, u64Value);
4795 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
4796 IEM_MC_PUSH_U64(u64Value);
4797 IEM_MC_ADVANCE_RIP();
4798 IEM_MC_END();
4799 break;
4800 }
4801
4802 return VINF_SUCCESS;
4803}
4804
4805
4806/** Opcode 0x0f 0xa0. */
4807FNIEMOP_DEF(iemOp_push_fs)
4808{
4809 IEMOP_MNEMONIC(push_fs, "push fs");
4810 IEMOP_HLP_MIN_386();
4811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4812 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
4813}
4814
4815
4816/** Opcode 0x0f 0xa1. */
4817FNIEMOP_DEF(iemOp_pop_fs)
4818{
4819 IEMOP_MNEMONIC(pop_fs, "pop fs");
4820 IEMOP_HLP_MIN_386();
4821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4822 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
4823}
4824
4825
4826/** Opcode 0x0f 0xa2. */
4827FNIEMOP_DEF(iemOp_cpuid)
4828{
4829 IEMOP_MNEMONIC(cpuid, "cpuid");
4830 IEMOP_HLP_MIN_486(); /* not all 486es. */
4831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4832 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
4833}
4834
4835
4836/**
4837 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4838 * iemOp_bts_Ev_Gv.
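 *
 * BT is told apart from the modifying forms (BTS, BTR, BTC) by its lack of a
 * locked worker (pfnLockedU16 is NULL), which is also why the memory path
 * below maps the operand read-only for BT and read-write for the others.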
4839 */
4840FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4841{
4842 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4843 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4844
4845 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4846 {
4847 /* register destination. */
4848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4849 switch (pVCpu->iem.s.enmEffOpSize)
4850 {
4851 case IEMMODE_16BIT:
4852 IEM_MC_BEGIN(3, 0);
4853 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4854 IEM_MC_ARG(uint16_t, u16Src, 1);
4855 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4856
4857 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4858 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4859 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4860 IEM_MC_REF_EFLAGS(pEFlags);
4861 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4862
4863 IEM_MC_ADVANCE_RIP();
4864 IEM_MC_END();
4865 return VINF_SUCCESS;
4866
4867 case IEMMODE_32BIT:
4868 IEM_MC_BEGIN(3, 0);
4869 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4870 IEM_MC_ARG(uint32_t, u32Src, 1);
4871 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4872
4873 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4874 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4875 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4876 IEM_MC_REF_EFLAGS(pEFlags);
4877 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4878
4879 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4880 IEM_MC_ADVANCE_RIP();
4881 IEM_MC_END();
4882 return VINF_SUCCESS;
4883
4884 case IEMMODE_64BIT:
4885 IEM_MC_BEGIN(3, 0);
4886 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4887 IEM_MC_ARG(uint64_t, u64Src, 1);
4888 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4889
4890 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4891 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4892 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4893 IEM_MC_REF_EFLAGS(pEFlags);
4894 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4895
4896 IEM_MC_ADVANCE_RIP();
4897 IEM_MC_END();
4898 return VINF_SUCCESS;
4899
4900 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4901 }
4902 }
4903 else
4904 {
4905 /* memory destination. */
4906
4907 uint32_t fAccess;
4908 if (pImpl->pfnLockedU16)
4909 fAccess = IEM_ACCESS_DATA_RW;
4910 else /* BT */
4911 fAccess = IEM_ACCESS_DATA_R;
4912
4913 /** @todo test negative bit offsets! */
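/* A register bit offset may address memory outside the operand itself: the
   effective address below is adjusted by the *signed* bit offset divided by
   the operand width (arithmetic shift right by log2(bits), then left by
   log2(bytes)), while the offset handed to the worker is reduced modulo the
   width.  E.g. 'bt dword [mem], eax' with eax=35 tests bit 3 of the dword at
   [mem+4]. */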
4914 switch (pVCpu->iem.s.enmEffOpSize)
4915 {
4916 case IEMMODE_16BIT:
4917 IEM_MC_BEGIN(3, 2);
4918 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4919 IEM_MC_ARG(uint16_t, u16Src, 1);
4920 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4921 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4922 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4923
4924 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4925 if (pImpl->pfnLockedU16)
4926 IEMOP_HLP_DONE_DECODING();
4927 else
4928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4929 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4930 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4931 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4932 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4933 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
4934 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4935 IEM_MC_FETCH_EFLAGS(EFlags);
4936
4937 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4938 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
4939 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4940 else
4941 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4942 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
4943
4944 IEM_MC_COMMIT_EFLAGS(EFlags);
4945 IEM_MC_ADVANCE_RIP();
4946 IEM_MC_END();
4947 return VINF_SUCCESS;
4948
4949 case IEMMODE_32BIT:
4950 IEM_MC_BEGIN(3, 2);
4951 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4952 IEM_MC_ARG(uint32_t, u32Src, 1);
4953 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4954 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4955 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4956
4957 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4958 if (pImpl->pfnLockedU16)
4959 IEMOP_HLP_DONE_DECODING();
4960 else
4961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4962 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4963 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4964 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4965 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4966 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4967 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4968 IEM_MC_FETCH_EFLAGS(EFlags);
4969
4970 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4971 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
4972 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4973 else
4974 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4975 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
4976
4977 IEM_MC_COMMIT_EFLAGS(EFlags);
4978 IEM_MC_ADVANCE_RIP();
4979 IEM_MC_END();
4980 return VINF_SUCCESS;
4981
4982 case IEMMODE_64BIT:
4983 IEM_MC_BEGIN(3, 2);
4984 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4985 IEM_MC_ARG(uint64_t, u64Src, 1);
4986 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4987 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4988 IEM_MC_LOCAL(int64_t, i64AddrAdj);
4989
4990 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4991 if (pImpl->pfnLockedU16)
4992 IEMOP_HLP_DONE_DECODING();
4993 else
4994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4995 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4996 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
4997 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
4998 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
4999 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5000 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5001 IEM_MC_FETCH_EFLAGS(EFlags);
5002
5003 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5004 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5005 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5006 else
5007 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5008 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5009
5010 IEM_MC_COMMIT_EFLAGS(EFlags);
5011 IEM_MC_ADVANCE_RIP();
5012 IEM_MC_END();
5013 return VINF_SUCCESS;
5014
5015 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5016 }
5017 }
5018}
5019
5020
5021/** Opcode 0x0f 0xa3. */
5022FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5023{
5024 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5025 IEMOP_HLP_MIN_386();
5026 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5027}
5028
5029
5030/**
5031 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
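 *
 * In the memory form the shift count immediate follows the ModRM operand
 * bytes, so the effective address is calculated with one outstanding
 * immediate byte (the '1' passed to IEM_MC_CALC_RM_EFF_ADDR) before the Ib
 * is fetched.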
5032 */
5033FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5034{
5035 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5036 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5037
5038 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5039 {
5040 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5042
5043 switch (pVCpu->iem.s.enmEffOpSize)
5044 {
5045 case IEMMODE_16BIT:
5046 IEM_MC_BEGIN(4, 0);
5047 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5048 IEM_MC_ARG(uint16_t, u16Src, 1);
5049 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5050 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5051
5052 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5053 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5054 IEM_MC_REF_EFLAGS(pEFlags);
5055 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5056
5057 IEM_MC_ADVANCE_RIP();
5058 IEM_MC_END();
5059 return VINF_SUCCESS;
5060
5061 case IEMMODE_32BIT:
5062 IEM_MC_BEGIN(4, 0);
5063 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5064 IEM_MC_ARG(uint32_t, u32Src, 1);
5065 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5066 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5067
5068 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5069 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5070 IEM_MC_REF_EFLAGS(pEFlags);
5071 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5072
5073 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5074 IEM_MC_ADVANCE_RIP();
5075 IEM_MC_END();
5076 return VINF_SUCCESS;
5077
5078 case IEMMODE_64BIT:
5079 IEM_MC_BEGIN(4, 0);
5080 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5081 IEM_MC_ARG(uint64_t, u64Src, 1);
5082 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5083 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5084
5085 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5086 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5087 IEM_MC_REF_EFLAGS(pEFlags);
5088 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5089
5090 IEM_MC_ADVANCE_RIP();
5091 IEM_MC_END();
5092 return VINF_SUCCESS;
5093
5094 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5095 }
5096 }
5097 else
5098 {
5099 switch (pVCpu->iem.s.enmEffOpSize)
5100 {
5101 case IEMMODE_16BIT:
5102 IEM_MC_BEGIN(4, 2);
5103 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5104 IEM_MC_ARG(uint16_t, u16Src, 1);
5105 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5106 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5107 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5108
5109 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5110 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5111 IEM_MC_ASSIGN(cShiftArg, cShift);
5112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5113 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5114 IEM_MC_FETCH_EFLAGS(EFlags);
5115 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5116 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5117
5118 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5119 IEM_MC_COMMIT_EFLAGS(EFlags);
5120 IEM_MC_ADVANCE_RIP();
5121 IEM_MC_END();
5122 return VINF_SUCCESS;
5123
5124 case IEMMODE_32BIT:
5125 IEM_MC_BEGIN(4, 2);
5126 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5127 IEM_MC_ARG(uint32_t, u32Src, 1);
5128 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5129 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5130 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5131
5132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5133 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5134 IEM_MC_ASSIGN(cShiftArg, cShift);
5135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5136 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5137 IEM_MC_FETCH_EFLAGS(EFlags);
5138 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5139 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5140
5141 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5142 IEM_MC_COMMIT_EFLAGS(EFlags);
5143 IEM_MC_ADVANCE_RIP();
5144 IEM_MC_END();
5145 return VINF_SUCCESS;
5146
5147 case IEMMODE_64BIT:
5148 IEM_MC_BEGIN(4, 2);
5149 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5150 IEM_MC_ARG(uint64_t, u64Src, 1);
5151 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5152 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5153 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5154
5155 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5156 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5157 IEM_MC_ASSIGN(cShiftArg, cShift);
5158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5159 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5160 IEM_MC_FETCH_EFLAGS(EFlags);
5161 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5162 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5163
5164 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5165 IEM_MC_COMMIT_EFLAGS(EFlags);
5166 IEM_MC_ADVANCE_RIP();
5167 IEM_MC_END();
5168 return VINF_SUCCESS;
5169
5170 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5171 }
5172 }
5173}
5174
5175
5176/**
5177 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
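 *
 * The shift count comes from CL here, so unlike the Ib form no immediate
 * byte follows and the effective address calculation takes no outstanding
 * immediate bytes.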
5178 */
5179FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5180{
5181 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5182 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5183
5184 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5185 {
5186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5187
5188 switch (pVCpu->iem.s.enmEffOpSize)
5189 {
5190 case IEMMODE_16BIT:
5191 IEM_MC_BEGIN(4, 0);
5192 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5193 IEM_MC_ARG(uint16_t, u16Src, 1);
5194 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5195 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5196
5197 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5198 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5199 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5200 IEM_MC_REF_EFLAGS(pEFlags);
5201 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5202
5203 IEM_MC_ADVANCE_RIP();
5204 IEM_MC_END();
5205 return VINF_SUCCESS;
5206
5207 case IEMMODE_32BIT:
5208 IEM_MC_BEGIN(4, 0);
5209 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5210 IEM_MC_ARG(uint32_t, u32Src, 1);
5211 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5212 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5213
5214 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5215 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5216 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5217 IEM_MC_REF_EFLAGS(pEFlags);
5218 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5219
5220 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5221 IEM_MC_ADVANCE_RIP();
5222 IEM_MC_END();
5223 return VINF_SUCCESS;
5224
5225 case IEMMODE_64BIT:
5226 IEM_MC_BEGIN(4, 0);
5227 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5228 IEM_MC_ARG(uint64_t, u64Src, 1);
5229 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5230 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5231
5232 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5233 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5234 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5235 IEM_MC_REF_EFLAGS(pEFlags);
5236 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5237
5238 IEM_MC_ADVANCE_RIP();
5239 IEM_MC_END();
5240 return VINF_SUCCESS;
5241
5242 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5243 }
5244 }
5245 else
5246 {
5247 switch (pVCpu->iem.s.enmEffOpSize)
5248 {
5249 case IEMMODE_16BIT:
5250 IEM_MC_BEGIN(4, 2);
5251 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5252 IEM_MC_ARG(uint16_t, u16Src, 1);
5253 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5254 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5255 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5256
5257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5259 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5260 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5261 IEM_MC_FETCH_EFLAGS(EFlags);
5262 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5263 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5264
5265 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5266 IEM_MC_COMMIT_EFLAGS(EFlags);
5267 IEM_MC_ADVANCE_RIP();
5268 IEM_MC_END();
5269 return VINF_SUCCESS;
5270
5271 case IEMMODE_32BIT:
5272 IEM_MC_BEGIN(4, 2);
5273 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5274 IEM_MC_ARG(uint32_t, u32Src, 1);
5275 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5276 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5277 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5278
5279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5281 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5282 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5283 IEM_MC_FETCH_EFLAGS(EFlags);
5284 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5285 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5286
5287 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5288 IEM_MC_COMMIT_EFLAGS(EFlags);
5289 IEM_MC_ADVANCE_RIP();
5290 IEM_MC_END();
5291 return VINF_SUCCESS;
5292
5293 case IEMMODE_64BIT:
5294 IEM_MC_BEGIN(4, 2);
5295 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5296 IEM_MC_ARG(uint64_t, u64Src, 1);
5297 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5298 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5299 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5300
5301 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5303 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5304 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5305 IEM_MC_FETCH_EFLAGS(EFlags);
5306 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5307 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5308
5309 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5310 IEM_MC_COMMIT_EFLAGS(EFlags);
5311 IEM_MC_ADVANCE_RIP();
5312 IEM_MC_END();
5313 return VINF_SUCCESS;
5314
5315 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5316 }
5317 }
5318}
5319
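/*
 * Note: the CL forms above fetch the shift count from CL into cShiftArg and
 * leave the count masking to the assembly workers. As an illustrative sketch
 * only (not the actual worker, which also computes the shift flags), the
 * 32-bit SHLD core behaves roughly like this:
 *
 *      uint32_t uDst, uSrc; uint8_t cShift;        // inputs
 *      cShift &= 31;                               // count is masked mod 32
 *      if (cShift)
 *          uDst = (uDst << cShift) | (uSrc >> (32 - cShift));
 *
 * For 16-bit operands a (masked) count of 16..31 yields undefined results on
 * real CPUs.
 */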
5320
5321
5322/** Opcode 0x0f 0xa4. */
5323FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5324{
5325 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5326 IEMOP_HLP_MIN_386();
5327 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5328}
5329
5330
5331/** Opcode 0x0f 0xa5. */
5332FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5333{
5334 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5335 IEMOP_HLP_MIN_386();
5336 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5337}
5338
5339
5340/** Opcode 0x0f 0xa8. */
5341FNIEMOP_DEF(iemOp_push_gs)
5342{
5343 IEMOP_MNEMONIC(push_gs, "push gs");
5344 IEMOP_HLP_MIN_386();
5345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5346 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5347}
5348
5349
5350/** Opcode 0x0f 0xa9. */
5351FNIEMOP_DEF(iemOp_pop_gs)
5352{
5353 IEMOP_MNEMONIC(pop_gs, "pop gs");
5354 IEMOP_HLP_MIN_386();
5355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5356 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5357}
5358
5359
5360/** Opcode 0x0f 0xaa. */
5361FNIEMOP_STUB(iemOp_rsm);
5362//IEMOP_HLP_MIN_386();
5363
5364
5365/** Opcode 0x0f 0xab. */
5366FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5367{
5368 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5369 IEMOP_HLP_MIN_386();
5370 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5371}
5372
5373
5374/** Opcode 0x0f 0xac. */
5375FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5376{
5377 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5378 IEMOP_HLP_MIN_386();
5379 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5380}
5381
5382
5383/** Opcode 0x0f 0xad. */
5384FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5385{
5386 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5387 IEMOP_HLP_MIN_386();
5388 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5389}
5390
5391
5392/** Opcode 0x0f 0xae mem/0. */
5393FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5394{
5395 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5396 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5397 return IEMOP_RAISE_INVALID_OPCODE();
5398
5399 IEM_MC_BEGIN(3, 1);
5400 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5401 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5402 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5403 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5405 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5406 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5407 IEM_MC_END();
5408 return VINF_SUCCESS;
5409}
5410
5411
5412/** Opcode 0x0f 0xae mem/1. */
5413FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5414{
5415 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5416 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5417 return IEMOP_RAISE_INVALID_OPCODE();
5418
5419 IEM_MC_BEGIN(3, 1);
5420 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5421 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5422 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5423 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5425 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5426 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5427 IEM_MC_END();
5428 return VINF_SUCCESS;
5429}
5430
5431
5432/** Opcode 0x0f 0xae mem/2. */
5433FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);
5434
5435/** Opcode 0x0f 0xae mem/3. */
5436FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);
5437
5438/** Opcode 0x0f 0xae mem/4. */
5439FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);
5440
5441/** Opcode 0x0f 0xae mem/5. */
5442FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);
5443
5444/** Opcode 0x0f 0xae mem/6. */
5445FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
5446
5447/** Opcode 0x0f 0xae mem/7. */
5448FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5449
5450
5451/** Opcode 0x0f 0xae 11b/5. */
5452FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
5453{
5454 RT_NOREF_PV(bRm);
5455 IEMOP_MNEMONIC(lfence, "lfence");
5456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5457 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5458 return IEMOP_RAISE_INVALID_OPCODE();
5459
5460 IEM_MC_BEGIN(0, 0);
5461 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5462 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
5463 else
5464 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5465 IEM_MC_ADVANCE_RIP();
5466 IEM_MC_END();
5467 return VINF_SUCCESS;
5468}
5469
5470
5471/** Opcode 0x0f 0xae 11b/6. */
5472FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
5473{
5474 RT_NOREF_PV(bRm);
5475 IEMOP_MNEMONIC(mfence, "mfence");
5476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5477 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5478 return IEMOP_RAISE_INVALID_OPCODE();
5479
5480 IEM_MC_BEGIN(0, 0);
5481 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5482 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
5483 else
5484 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5485 IEM_MC_ADVANCE_RIP();
5486 IEM_MC_END();
5487 return VINF_SUCCESS;
5488}
5489
5490
5491/** Opcode 0x0f 0xae 11b/7. */
5492FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
5493{
5494 RT_NOREF_PV(bRm);
5495 IEMOP_MNEMONIC(sfence, "sfence");
5496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5497 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5498 return IEMOP_RAISE_INVALID_OPCODE();
5499
5500 IEM_MC_BEGIN(0, 0);
5501 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5502 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
5503 else
5504 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5505 IEM_MC_ADVANCE_RIP();
5506 IEM_MC_END();
5507 return VINF_SUCCESS;
5508}
5509
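/*
 * Note: all three fences above pick the real LFENCE/MFENCE/SFENCE worker only
 * when the *host* has SSE2 and otherwise fall back on iemAImpl_alt_mem_fence,
 * presumably built around a serializing locked instruction. A rough sketch of
 * such a fallback (an assumption, not the actual implementation):
 *
 *      static void AltMemFence(void)
 *      {
 *          volatile int32_t iTmp = 0;
 *          ASMAtomicXchgS32(&iTmp, 1);     // XCHG with memory implies LOCK,
 *      }                                   // acting as a full memory barrier.
 */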
5510
5511/** Opcode 0xf3 0x0f 0xae 11b/0. */
5512FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
5513
5514/** Opcode 0xf3 0x0f 0xae 11b/1. */
5515FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
5516
5517/** Opcode 0xf3 0x0f 0xae 11b/2. */
5518FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
5519
5520/** Opcode 0xf3 0x0f 0xae 11b/3. */
5521FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5522
5523
5524/** Opcode 0x0f 0xae. */
5525FNIEMOP_DEF(iemOp_Grp15)
5526{
5527 IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor needed, but useful when debugging 286 code. */
5528 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5529 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
5530 {
5531 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5532 {
5533 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
5534 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
5535 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
5536 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
5537 case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
5538 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
5539 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
5540 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
5541 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5542 }
5543 }
5544 else
5545 {
5546 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
5547 {
5548 case 0:
5549 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5550 {
5551 case 0: return IEMOP_RAISE_INVALID_OPCODE();
5552 case 1: return IEMOP_RAISE_INVALID_OPCODE();
5553 case 2: return IEMOP_RAISE_INVALID_OPCODE();
5554 case 3: return IEMOP_RAISE_INVALID_OPCODE();
5555 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5556 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
5557 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
5558 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
5559 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5560 }
5561 break;
5562
5563 case IEM_OP_PRF_REPZ:
5564 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5565 {
5566 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
5567 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
5568 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
5569 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
5570 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5571 case 5: return IEMOP_RAISE_INVALID_OPCODE();
5572 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5573 case 7: return IEMOP_RAISE_INVALID_OPCODE();
5574 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5575 }
5576 break;
5577
5578 default:
5579 return IEMOP_RAISE_INVALID_OPCODE();
5580 }
5581 }
5582}
5583
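/*
 * Note: group 15 dispatches on the ModRM mod and reg fields plus the
 * mandatory prefixes. Worked decode example (values are illustrative):
 *
 *      uint8_t const bRm  = 0xe8;              // 0f ae e8 = lfence
 *      uint8_t const iMod = bRm >> 6;          // 3 -> register (11b) form
 *      uint8_t const iReg = (bRm >> 3) & 7;    // 5 -> /5 -> lfence
 *      uint8_t const iRm  = bRm & 7;           // 0, ignored by the fences
 */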
5584
5585/** Opcode 0x0f 0xaf. */
5586FNIEMOP_DEF(iemOp_imul_Gv_Ev)
5587{
5588 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
5589 IEMOP_HLP_MIN_386();
5590 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5591 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
5592}
5593
5594
5595/** Opcode 0x0f 0xb0. */
5596FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
5597{
5598 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
5599 IEMOP_HLP_MIN_486();
5600 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5601
5602 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5603 {
5604 IEMOP_HLP_DONE_DECODING();
5605 IEM_MC_BEGIN(4, 0);
5606 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5607 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5608 IEM_MC_ARG(uint8_t, u8Src, 2);
5609 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5610
5611 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5612 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5613 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
5614 IEM_MC_REF_EFLAGS(pEFlags);
5615 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5616 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5617 else
5618 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5619
5620 IEM_MC_ADVANCE_RIP();
5621 IEM_MC_END();
5622 }
5623 else
5624 {
5625 IEM_MC_BEGIN(4, 3);
5626 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5627 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5628 IEM_MC_ARG(uint8_t, u8Src, 2);
5629 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5630 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5631 IEM_MC_LOCAL(uint8_t, u8Al);
5632
5633 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5634 IEMOP_HLP_DONE_DECODING();
5635 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5636 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5637 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
5638 IEM_MC_FETCH_EFLAGS(EFlags);
5639 IEM_MC_REF_LOCAL(pu8Al, u8Al);
5640 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5641 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5642 else
5643 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5644
5645 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
5646 IEM_MC_COMMIT_EFLAGS(EFlags);
5647 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
5648 IEM_MC_ADVANCE_RIP();
5649 IEM_MC_END();
5650 }
5651 return VINF_SUCCESS;
5652}
5653
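/*
 * Note: the CMPXCHG semantics in rough C form (an illustrative sketch; the
 * real iemAImpl_cmpxchg_u* workers are in assembly and also update the
 * arithmetic flags like a CMP would):
 *
 *      if (*pu8Dst == *pu8Al)      // accumulator equals destination?
 *          *pu8Dst = u8Src;        // yes: store source, ZF=1
 *      else
 *          *pu8Al = *pu8Dst;       // no: load destination into AL, ZF=0
 */
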
5654/** Opcode 0x0f 0xb1. */
5655FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
5656{
5657 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
5658 IEMOP_HLP_MIN_486();
5659 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5660
5661 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5662 {
5663 IEMOP_HLP_DONE_DECODING();
5664 switch (pVCpu->iem.s.enmEffOpSize)
5665 {
5666 case IEMMODE_16BIT:
5667 IEM_MC_BEGIN(4, 0);
5668 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5669 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5670 IEM_MC_ARG(uint16_t, u16Src, 2);
5671 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5672
5673 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5674 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5675 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
5676 IEM_MC_REF_EFLAGS(pEFlags);
5677 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5678 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5679 else
5680 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5681
5682 IEM_MC_ADVANCE_RIP();
5683 IEM_MC_END();
5684 return VINF_SUCCESS;
5685
5686 case IEMMODE_32BIT:
5687 IEM_MC_BEGIN(4, 0);
5688 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5689 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5690 IEM_MC_ARG(uint32_t, u32Src, 2);
5691 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5692
5693 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5694 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5695 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
5696 IEM_MC_REF_EFLAGS(pEFlags);
5697 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5698 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5699 else
5700 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5701
5702 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
5703 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5704 IEM_MC_ADVANCE_RIP();
5705 IEM_MC_END();
5706 return VINF_SUCCESS;
5707
5708 case IEMMODE_64BIT:
5709 IEM_MC_BEGIN(4, 0);
5710 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5711 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5712#ifdef RT_ARCH_X86
5713 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5714#else
5715 IEM_MC_ARG(uint64_t, u64Src, 2);
5716#endif
5717 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5718
5719 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5720 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
5721 IEM_MC_REF_EFLAGS(pEFlags);
5722#ifdef RT_ARCH_X86
5723 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5724 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5725 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5726 else
5727 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5728#else
5729 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5730 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5731 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5732 else
5733 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5734#endif
5735
5736 IEM_MC_ADVANCE_RIP();
5737 IEM_MC_END();
5738 return VINF_SUCCESS;
5739
5740 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5741 }
5742 }
5743 else
5744 {
5745 switch (pVCpu->iem.s.enmEffOpSize)
5746 {
5747 case IEMMODE_16BIT:
5748 IEM_MC_BEGIN(4, 3);
5749 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5750 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5751 IEM_MC_ARG(uint16_t, u16Src, 2);
5752 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5753 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5754 IEM_MC_LOCAL(uint16_t, u16Ax);
5755
5756 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5757 IEMOP_HLP_DONE_DECODING();
5758 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5759 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5760 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
5761 IEM_MC_FETCH_EFLAGS(EFlags);
5762 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
5763 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5764 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5765 else
5766 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5767
5768 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5769 IEM_MC_COMMIT_EFLAGS(EFlags);
5770 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
5771 IEM_MC_ADVANCE_RIP();
5772 IEM_MC_END();
5773 return VINF_SUCCESS;
5774
5775 case IEMMODE_32BIT:
5776 IEM_MC_BEGIN(4, 3);
5777 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5778 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5779 IEM_MC_ARG(uint32_t, u32Src, 2);
5780 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5781 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5782 IEM_MC_LOCAL(uint32_t, u32Eax);
5783
5784 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5785 IEMOP_HLP_DONE_DECODING();
5786 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5787 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5788 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
5789 IEM_MC_FETCH_EFLAGS(EFlags);
5790 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
5791 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5792 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5793 else
5794 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5795
5796 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5797 IEM_MC_COMMIT_EFLAGS(EFlags);
5798 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
5799 IEM_MC_ADVANCE_RIP();
5800 IEM_MC_END();
5801 return VINF_SUCCESS;
5802
5803 case IEMMODE_64BIT:
5804 IEM_MC_BEGIN(4, 3);
5805 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5806 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5807#ifdef RT_ARCH_X86
5808 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5809#else
5810 IEM_MC_ARG(uint64_t, u64Src, 2);
5811#endif
5812 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5813 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5814 IEM_MC_LOCAL(uint64_t, u64Rax);
5815
5816 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5817 IEMOP_HLP_DONE_DECODING();
5818 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5819 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
5820 IEM_MC_FETCH_EFLAGS(EFlags);
5821 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
5822#ifdef RT_ARCH_X86
5823 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5824 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5825 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5826 else
5827 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5828#else
5829 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5830 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5831 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5832 else
5833 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5834#endif
5835
5836 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5837 IEM_MC_COMMIT_EFLAGS(EFlags);
5838 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
5839 IEM_MC_ADVANCE_RIP();
5840 IEM_MC_END();
5841 return VINF_SUCCESS;
5842
5843 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5844 }
5845 }
5846}
5847
5848
5849FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
5850{
5851 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
5852 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
5853
5854 switch (pVCpu->iem.s.enmEffOpSize)
5855 {
5856 case IEMMODE_16BIT:
5857 IEM_MC_BEGIN(5, 1);
5858 IEM_MC_ARG(uint16_t, uSel, 0);
5859 IEM_MC_ARG(uint16_t, offSeg, 1);
5860 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5861 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5862 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5863 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5864 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5866 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5867 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
5868 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5869 IEM_MC_END();
5870 return VINF_SUCCESS;
5871
5872 case IEMMODE_32BIT:
5873 IEM_MC_BEGIN(5, 1);
5874 IEM_MC_ARG(uint16_t, uSel, 0);
5875 IEM_MC_ARG(uint32_t, offSeg, 1);
5876 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5877 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5878 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5879 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5880 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5882 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5883 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
5884 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5885 IEM_MC_END();
5886 return VINF_SUCCESS;
5887
5888 case IEMMODE_64BIT:
5889 IEM_MC_BEGIN(5, 1);
5890 IEM_MC_ARG(uint16_t, uSel, 0);
5891 IEM_MC_ARG(uint64_t, offSeg, 1);
5892 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5893 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5894 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5895 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5896 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5898 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manual claims it only loads a 32-bit greg. */
5899 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5900 else
5901 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5902 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
5903 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5904 IEM_MC_END();
5905 return VINF_SUCCESS;
5906
5907 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5908 }
5909}
5910
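/*
 * Note: the common worker above loads an m16:16, m16:32 or m16:64 far
 * pointer; the offset comes first in memory with the 16-bit selector right
 * after it, which is why the selector fetch uses a displacement of 2, 4 or 8.
 * Layout sketch for the 32-bit case (hypothetical type, for illustration):
 *
 *      #pragma pack(1)
 *      typedef struct FARPTR1632
 *      {
 *          uint32_t offSeg;            // bytes 0..3: the offset
 *          uint16_t uSel;              // bytes 4..5: the selector
 *      } FARPTR1632;
 *      #pragma pack()
 */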
5911
5912/** Opcode 0x0f 0xb2. */
5913FNIEMOP_DEF(iemOp_lss_Gv_Mp)
5914{
5915 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
5916 IEMOP_HLP_MIN_386();
5917 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5918 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5919 return IEMOP_RAISE_INVALID_OPCODE();
5920 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
5921}
5922
5923
5924/** Opcode 0x0f 0xb3. */
5925FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5926{
5927 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
5928 IEMOP_HLP_MIN_386();
5929 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5930}
5931
5932
5933/** Opcode 0x0f 0xb4. */
5934FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
5935{
5936 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
5937 IEMOP_HLP_MIN_386();
5938 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5939 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5940 return IEMOP_RAISE_INVALID_OPCODE();
5941 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
5942}
5943
5944
5945/** Opcode 0x0f 0xb5. */
5946FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
5947{
5948 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
5949 IEMOP_HLP_MIN_386();
5950 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5951 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5952 return IEMOP_RAISE_INVALID_OPCODE();
5953 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
5954}
5955
5956
5957/** Opcode 0x0f 0xb6. */
5958FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
5959{
5960 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
5961 IEMOP_HLP_MIN_386();
5962
5963 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5964
5965 /*
5966 * If rm is denoting a register, no more instruction bytes.
5967 */
5968 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5969 {
5970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5971 switch (pVCpu->iem.s.enmEffOpSize)
5972 {
5973 case IEMMODE_16BIT:
5974 IEM_MC_BEGIN(0, 1);
5975 IEM_MC_LOCAL(uint16_t, u16Value);
5976 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5977 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
5978 IEM_MC_ADVANCE_RIP();
5979 IEM_MC_END();
5980 return VINF_SUCCESS;
5981
5982 case IEMMODE_32BIT:
5983 IEM_MC_BEGIN(0, 1);
5984 IEM_MC_LOCAL(uint32_t, u32Value);
5985 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5986 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
5987 IEM_MC_ADVANCE_RIP();
5988 IEM_MC_END();
5989 return VINF_SUCCESS;
5990
5991 case IEMMODE_64BIT:
5992 IEM_MC_BEGIN(0, 1);
5993 IEM_MC_LOCAL(uint64_t, u64Value);
5994 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5995 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
5996 IEM_MC_ADVANCE_RIP();
5997 IEM_MC_END();
5998 return VINF_SUCCESS;
5999
6000 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6001 }
6002 }
6003 else
6004 {
6005 /*
6006 * We're loading a register from memory.
6007 */
6008 switch (pVCpu->iem.s.enmEffOpSize)
6009 {
6010 case IEMMODE_16BIT:
6011 IEM_MC_BEGIN(0, 2);
6012 IEM_MC_LOCAL(uint16_t, u16Value);
6013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6016 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6017 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6018 IEM_MC_ADVANCE_RIP();
6019 IEM_MC_END();
6020 return VINF_SUCCESS;
6021
6022 case IEMMODE_32BIT:
6023 IEM_MC_BEGIN(0, 2);
6024 IEM_MC_LOCAL(uint32_t, u32Value);
6025 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6026 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6028 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6029 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6030 IEM_MC_ADVANCE_RIP();
6031 IEM_MC_END();
6032 return VINF_SUCCESS;
6033
6034 case IEMMODE_64BIT:
6035 IEM_MC_BEGIN(0, 2);
6036 IEM_MC_LOCAL(uint64_t, u64Value);
6037 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6038 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6040 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6041 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6042 IEM_MC_ADVANCE_RIP();
6043 IEM_MC_END();
6044 return VINF_SUCCESS;
6045
6046 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6047 }
6048 }
6049}
6050
6051
6052/** Opcode 0x0f 0xb7. */
6053FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6054{
6055 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6056 IEMOP_HLP_MIN_386();
6057
6058 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6059
6060 /** @todo Not entirely sure how the operand size prefix is handled here,
6061 * assuming that it will be ignored. Would be nice to have a few
6062 * tests for this. */
6063 /*
6064 * If rm is denoting a register, no more instruction bytes.
6065 */
6066 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6067 {
6068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6069 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6070 {
6071 IEM_MC_BEGIN(0, 1);
6072 IEM_MC_LOCAL(uint32_t, u32Value);
6073 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6074 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6075 IEM_MC_ADVANCE_RIP();
6076 IEM_MC_END();
6077 }
6078 else
6079 {
6080 IEM_MC_BEGIN(0, 1);
6081 IEM_MC_LOCAL(uint64_t, u64Value);
6082 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6083 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6084 IEM_MC_ADVANCE_RIP();
6085 IEM_MC_END();
6086 }
6087 }
6088 else
6089 {
6090 /*
6091 * We're loading a register from memory.
6092 */
6093 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6094 {
6095 IEM_MC_BEGIN(0, 2);
6096 IEM_MC_LOCAL(uint32_t, u32Value);
6097 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6098 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6100 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6101 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6102 IEM_MC_ADVANCE_RIP();
6103 IEM_MC_END();
6104 }
6105 else
6106 {
6107 IEM_MC_BEGIN(0, 2);
6108 IEM_MC_LOCAL(uint64_t, u64Value);
6109 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6110 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6112 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6113 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6114 IEM_MC_ADVANCE_RIP();
6115 IEM_MC_END();
6116 }
6117 }
6118 return VINF_SUCCESS;
6119}
6120
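/*
 * Note: in C terms the movzx forms above are plain unsigned widening
 * conversions, e.g. for movzx Gv,Ew with a 32-bit operand size
 * (illustrative):
 *
 *      uint32_t u32Value = (uint32_t)u16Src;   // upper 16 bits become zero
 *
 * In 64-bit mode the 32-bit result is further zero extended to 64 bits, as
 * with any 32-bit GPR write.
 */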
6121
6122/** Opcode 0x0f 0xb8. */
6123FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
6124
6125
6126/** Opcode 0x0f 0xb9. */
6127FNIEMOP_DEF(iemOp_Grp10)
6128{
6129 Log(("iemOp_Grp10 -> #UD\n"));
6130 return IEMOP_RAISE_INVALID_OPCODE();
6131}
6132
6133
6134/** Opcode 0x0f 0xba. */
6135FNIEMOP_DEF(iemOp_Grp8)
6136{
6137 IEMOP_HLP_MIN_386();
6138 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6139 PCIEMOPBINSIZES pImpl;
6140 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6141 {
6142 case 0: case 1: case 2: case 3:
6143 return IEMOP_RAISE_INVALID_OPCODE();
6144 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6145 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6146 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6147 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6148 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6149 }
6150 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6151
6152 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6153 {
6154 /* register destination. */
6155 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6157
6158 switch (pVCpu->iem.s.enmEffOpSize)
6159 {
6160 case IEMMODE_16BIT:
6161 IEM_MC_BEGIN(3, 0);
6162 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6163 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6164 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6165
6166 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6167 IEM_MC_REF_EFLAGS(pEFlags);
6168 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6169
6170 IEM_MC_ADVANCE_RIP();
6171 IEM_MC_END();
6172 return VINF_SUCCESS;
6173
6174 case IEMMODE_32BIT:
6175 IEM_MC_BEGIN(3, 0);
6176 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6177 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6178 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6179
6180 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6181 IEM_MC_REF_EFLAGS(pEFlags);
6182 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6183
6184 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6185 IEM_MC_ADVANCE_RIP();
6186 IEM_MC_END();
6187 return VINF_SUCCESS;
6188
6189 case IEMMODE_64BIT:
6190 IEM_MC_BEGIN(3, 0);
6191 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6192 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6193 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6194
6195 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6196 IEM_MC_REF_EFLAGS(pEFlags);
6197 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6198
6199 IEM_MC_ADVANCE_RIP();
6200 IEM_MC_END();
6201 return VINF_SUCCESS;
6202
6203 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6204 }
6205 }
6206 else
6207 {
6208 /* memory destination. */
6209
6210 uint32_t fAccess;
6211 if (pImpl->pfnLockedU16)
6212 fAccess = IEM_ACCESS_DATA_RW;
6213 else /* BT */
6214 fAccess = IEM_ACCESS_DATA_R;
6215
6216 /** @todo test negative bit offsets! */
6217 switch (pVCpu->iem.s.enmEffOpSize)
6218 {
6219 case IEMMODE_16BIT:
6220 IEM_MC_BEGIN(3, 1);
6221 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6222 IEM_MC_ARG(uint16_t, u16Src, 1);
6223 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6225
6226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6227 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6228 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6229 if (pImpl->pfnLockedU16)
6230 IEMOP_HLP_DONE_DECODING();
6231 else
6232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6233 IEM_MC_FETCH_EFLAGS(EFlags);
6234 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6235 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6236 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6237 else
6238 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6239 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6240
6241 IEM_MC_COMMIT_EFLAGS(EFlags);
6242 IEM_MC_ADVANCE_RIP();
6243 IEM_MC_END();
6244 return VINF_SUCCESS;
6245
6246 case IEMMODE_32BIT:
6247 IEM_MC_BEGIN(3, 1);
6248 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6249 IEM_MC_ARG(uint32_t, u32Src, 1);
6250 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6251 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6252
6253 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6254 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6255 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6256 if (pImpl->pfnLockedU16)
6257 IEMOP_HLP_DONE_DECODING();
6258 else
6259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6260 IEM_MC_FETCH_EFLAGS(EFlags);
6261 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6262 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6263 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6264 else
6265 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6266 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6267
6268 IEM_MC_COMMIT_EFLAGS(EFlags);
6269 IEM_MC_ADVANCE_RIP();
6270 IEM_MC_END();
6271 return VINF_SUCCESS;
6272
6273 case IEMMODE_64BIT:
6274 IEM_MC_BEGIN(3, 1);
6275 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6276 IEM_MC_ARG(uint64_t, u64Src, 1);
6277 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6278 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6279
6280 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6281 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6282 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6283 if (pImpl->pfnLockedU16)
6284 IEMOP_HLP_DONE_DECODING();
6285 else
6286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6287 IEM_MC_FETCH_EFLAGS(EFlags);
6288 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6289 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6290 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6291 else
6292 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6293 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6294
6295 IEM_MC_COMMIT_EFLAGS(EFlags);
6296 IEM_MC_ADVANCE_RIP();
6297 IEM_MC_END();
6298 return VINF_SUCCESS;
6299
6300 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6301 }
6302 }
6303
6304}
6305
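/*
 * Note: the immediate forms above truncate the bit offset to the operand
 * width (u8Bit & 0x0f/0x1f/0x3f), unlike bt/bts/btr/btc Ev,Gv where the
 * offset can index beyond the addressed unit. Sketch of the BT core for a
 * 16-bit destination (illustrative; the workers leave the other flags
 * undefined as declared above):
 *
 *      unsigned const iBit = u8Bit & 15;
 *      unsigned const fCF  = (*pu16Dst >> iBit) & 1;   // CF = selected bit
 */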
6306
6307/** Opcode 0x0f 0xbb. */
6308FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6309{
6310 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
6311 IEMOP_HLP_MIN_386();
6312 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6313}
6314
6315
6316/** Opcode 0x0f 0xbc. */
6317FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6318{
6319 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
6320 IEMOP_HLP_MIN_386();
6321 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6322 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6323}
6324
6325
6326/** Opcode 0x0f 0xbd. */
6327FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6328{
6329 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
6330 IEMOP_HLP_MIN_386();
6331 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6332 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6333}
6334
6335
6336/** Opcode 0x0f 0xbe. */
6337FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
6338{
6339 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
6340 IEMOP_HLP_MIN_386();
6341
6342 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6343
6344 /*
6345 * If rm is denoting a register, no more instruction bytes.
6346 */
6347 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6348 {
6349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6350 switch (pVCpu->iem.s.enmEffOpSize)
6351 {
6352 case IEMMODE_16BIT:
6353 IEM_MC_BEGIN(0, 1);
6354 IEM_MC_LOCAL(uint16_t, u16Value);
6355 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6356 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6357 IEM_MC_ADVANCE_RIP();
6358 IEM_MC_END();
6359 return VINF_SUCCESS;
6360
6361 case IEMMODE_32BIT:
6362 IEM_MC_BEGIN(0, 1);
6363 IEM_MC_LOCAL(uint32_t, u32Value);
6364 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6365 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6366 IEM_MC_ADVANCE_RIP();
6367 IEM_MC_END();
6368 return VINF_SUCCESS;
6369
6370 case IEMMODE_64BIT:
6371 IEM_MC_BEGIN(0, 1);
6372 IEM_MC_LOCAL(uint64_t, u64Value);
6373 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6374 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6375 IEM_MC_ADVANCE_RIP();
6376 IEM_MC_END();
6377 return VINF_SUCCESS;
6378
6379 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6380 }
6381 }
6382 else
6383 {
6384 /*
6385 * We're loading a register from memory.
6386 */
6387 switch (pVCpu->iem.s.enmEffOpSize)
6388 {
6389 case IEMMODE_16BIT:
6390 IEM_MC_BEGIN(0, 2);
6391 IEM_MC_LOCAL(uint16_t, u16Value);
6392 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6393 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6395 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6396 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6397 IEM_MC_ADVANCE_RIP();
6398 IEM_MC_END();
6399 return VINF_SUCCESS;
6400
6401 case IEMMODE_32BIT:
6402 IEM_MC_BEGIN(0, 2);
6403 IEM_MC_LOCAL(uint32_t, u32Value);
6404 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6405 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6407 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6408 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6409 IEM_MC_ADVANCE_RIP();
6410 IEM_MC_END();
6411 return VINF_SUCCESS;
6412
6413 case IEMMODE_64BIT:
6414 IEM_MC_BEGIN(0, 2);
6415 IEM_MC_LOCAL(uint64_t, u64Value);
6416 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6419 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6420 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6421 IEM_MC_ADVANCE_RIP();
6422 IEM_MC_END();
6423 return VINF_SUCCESS;
6424
6425 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6426 }
6427 }
6428}
6429
6430
6431/** Opcode 0x0f 0xbf. */
6432FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
6433{
6434 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
6435 IEMOP_HLP_MIN_386();
6436
6437 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6438
6439 /** @todo Not entirely sure how the operand size prefix is handled here,
6440 * assuming that it will be ignored. Would be nice to have a few
6441 * tests for this. */
6442 /*
6443 * If rm is denoting a register, no more instruction bytes.
6444 */
6445 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6446 {
6447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6448 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6449 {
6450 IEM_MC_BEGIN(0, 1);
6451 IEM_MC_LOCAL(uint32_t, u32Value);
6452 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6453 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6454 IEM_MC_ADVANCE_RIP();
6455 IEM_MC_END();
6456 }
6457 else
6458 {
6459 IEM_MC_BEGIN(0, 1);
6460 IEM_MC_LOCAL(uint64_t, u64Value);
6461 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6462 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6463 IEM_MC_ADVANCE_RIP();
6464 IEM_MC_END();
6465 }
6466 }
6467 else
6468 {
6469 /*
6470 * We're loading a register from memory.
6471 */
6472 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6473 {
6474 IEM_MC_BEGIN(0, 2);
6475 IEM_MC_LOCAL(uint32_t, u32Value);
6476 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6477 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6479 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6480 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6481 IEM_MC_ADVANCE_RIP();
6482 IEM_MC_END();
6483 }
6484 else
6485 {
6486 IEM_MC_BEGIN(0, 2);
6487 IEM_MC_LOCAL(uint64_t, u64Value);
6488 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6489 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6491 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6492 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6493 IEM_MC_ADVANCE_RIP();
6494 IEM_MC_END();
6495 }
6496 }
6497 return VINF_SUCCESS;
6498}
6499
6500
6501/** Opcode 0x0f 0xc0. */
6502FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
6503{
6504 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6505 IEMOP_HLP_MIN_486();
6506 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
6507
6508 /*
6509 * If rm is denoting a register, no more instruction bytes.
6510 */
6511 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6512 {
6513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6514
6515 IEM_MC_BEGIN(3, 0);
6516 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6517 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6518 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6519
6520 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6521 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6522 IEM_MC_REF_EFLAGS(pEFlags);
6523 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6524
6525 IEM_MC_ADVANCE_RIP();
6526 IEM_MC_END();
6527 }
6528 else
6529 {
6530 /*
6531 * We're accessing memory.
6532 */
6533 IEM_MC_BEGIN(3, 3);
6534 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6535 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6536 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6537 IEM_MC_LOCAL(uint8_t, u8RegCopy);
6538 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6539
6540 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6541 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6542 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6543 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
6544 IEM_MC_FETCH_EFLAGS(EFlags);
6545 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6546 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6547 else
6548 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
6549
6550 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6551 IEM_MC_COMMIT_EFLAGS(EFlags);
6552 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
6553 IEM_MC_ADVANCE_RIP();
6554 IEM_MC_END();
6555 return VINF_SUCCESS;
6556 }
6557 return VINF_SUCCESS;
6558}
6559
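/*
 * Note: XADD is an exchange-and-add. Rough C equivalent of the byte worker
 * (illustrative only; the real worker also computes the ADD flags):
 *
 *      uint8_t const u8Tmp = *pu8Dst;          // remember old destination
 *      *pu8Dst = (uint8_t)(u8Tmp + *pu8Reg);   // destination = old + register
 *      *pu8Reg = u8Tmp;                        // register = old destination
 */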
6560
6561/** Opcode 0x0f 0xc1. */
6562FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
6563{
6564 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
6565 IEMOP_HLP_MIN_486();
6566 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6567
6568 /*
6569 * If rm is denoting a register, no more instruction bytes.
6570 */
6571 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6572 {
6573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6574
6575 switch (pVCpu->iem.s.enmEffOpSize)
6576 {
6577 case IEMMODE_16BIT:
6578 IEM_MC_BEGIN(3, 0);
6579 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6580 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6581 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6582
6583 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6584 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6585 IEM_MC_REF_EFLAGS(pEFlags);
6586 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6587
6588 IEM_MC_ADVANCE_RIP();
6589 IEM_MC_END();
6590 return VINF_SUCCESS;
6591
6592 case IEMMODE_32BIT:
6593 IEM_MC_BEGIN(3, 0);
6594 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6595 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6596 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6597
6598 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6599 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6600 IEM_MC_REF_EFLAGS(pEFlags);
6601 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6602
6603 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6604 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
6605 IEM_MC_ADVANCE_RIP();
6606 IEM_MC_END();
6607 return VINF_SUCCESS;
6608
6609 case IEMMODE_64BIT:
6610 IEM_MC_BEGIN(3, 0);
6611 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6612 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6613 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6614
6615 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6616 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6617 IEM_MC_REF_EFLAGS(pEFlags);
6618 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6619
6620 IEM_MC_ADVANCE_RIP();
6621 IEM_MC_END();
6622 return VINF_SUCCESS;
6623
6624 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6625 }
6626 }
6627 else
6628 {
6629 /*
6630 * We're accessing memory.
6631 */
6632 switch (pVCpu->iem.s.enmEffOpSize)
6633 {
6634 case IEMMODE_16BIT:
6635 IEM_MC_BEGIN(3, 3);
6636 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6637 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6638 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6639 IEM_MC_LOCAL(uint16_t, u16RegCopy);
6640 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6641
6642 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6643 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6644 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6645 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
6646 IEM_MC_FETCH_EFLAGS(EFlags);
6647 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6648 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6649 else
6650 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
6651
6652 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6653 IEM_MC_COMMIT_EFLAGS(EFlags);
6654 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
6655 IEM_MC_ADVANCE_RIP();
6656 IEM_MC_END();
6657 return VINF_SUCCESS;
6658
6659 case IEMMODE_32BIT:
6660 IEM_MC_BEGIN(3, 3);
6661 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6662 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6663 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6664 IEM_MC_LOCAL(uint32_t, u32RegCopy);
6665 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6666
6667 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6668 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6669 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6670 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
6671 IEM_MC_FETCH_EFLAGS(EFlags);
6672 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6673 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6674 else
6675 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
6676
6677 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6678 IEM_MC_COMMIT_EFLAGS(EFlags);
6679 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
6680 IEM_MC_ADVANCE_RIP();
6681 IEM_MC_END();
6682 return VINF_SUCCESS;
6683
6684 case IEMMODE_64BIT:
6685 IEM_MC_BEGIN(3, 3);
6686 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6687 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6688 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6689 IEM_MC_LOCAL(uint64_t, u64RegCopy);
6690 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6691
6692 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6693 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6694 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6695 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
6696 IEM_MC_FETCH_EFLAGS(EFlags);
6697 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6698 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6699 else
6700 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
6701
6702 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6703 IEM_MC_COMMIT_EFLAGS(EFlags);
6704 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
6705 IEM_MC_ADVANCE_RIP();
6706 IEM_MC_END();
6707 return VINF_SUCCESS;
6708
6709 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6710 }
6711 }
6712}
6713
6714/** Opcode 0x0f 0xc2. */
6715FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);
6716
6717
6718/** Opcode 0x0f 0xc3. */
6719FNIEMOP_DEF(iemOp_movnti_My_Gy)
6720{
6721 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
6722
6723 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6724
6725 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
6726 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
6727 {
6728 switch (pVCpu->iem.s.enmEffOpSize)
6729 {
6730 case IEMMODE_32BIT:
6731 IEM_MC_BEGIN(0, 2);
6732 IEM_MC_LOCAL(uint32_t, u32Value);
6733 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6734
6735 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6737 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6738 return IEMOP_RAISE_INVALID_OPCODE();
6739
6740 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6741 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
6742 IEM_MC_ADVANCE_RIP();
6743 IEM_MC_END();
6744 break;
6745
6746 case IEMMODE_64BIT:
6747 IEM_MC_BEGIN(0, 2);
6748 IEM_MC_LOCAL(uint64_t, u64Value);
6749 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6750
6751 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6753 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6754 return IEMOP_RAISE_INVALID_OPCODE();
6755
6756 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6757 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
6758 IEM_MC_ADVANCE_RIP();
6759 IEM_MC_END();
6760 break;
6761
6762 case IEMMODE_16BIT:
6763 /** @todo check this form. */
6764 return IEMOP_RAISE_INVALID_OPCODE();
6765 }
6766 }
6767 else
6768 return IEMOP_RAISE_INVALID_OPCODE();
6769 return VINF_SUCCESS;
6770}
6771
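/*
 * Note: MOVNTI is a non-temporal store hint; architecturally the result is
 * just a store, which is all the emulation above needs to implement. Native
 * code would typically reach it via the SSE2 intrinsic (illustrative):
 *
 *      #include <emmintrin.h>
 *      void StoreNonTemporal(int *pi, int i)
 *      {
 *          _mm_stream_si32(pi, i);     // compiles to movnti [pi], i
 *      }
 */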
6772
6773/** Opcode 0x0f 0xc4. */
6774FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);
6775
6776/** Opcode 0x0f 0xc5. */
6777FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);
6778
6779/** Opcode 0x0f 0xc6. */
6780FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
6781
6782
6783/** Opcode 0x0f 0xc7 !11/1. */
6784FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
6785{
6786 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
6787
6788 IEM_MC_BEGIN(4, 3);
6789 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
6790 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
6791 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
6792 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
6793 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
6794 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
6795 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6796
6797 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6798 IEMOP_HLP_DONE_DECODING();
6799 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6800
6801 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
6802 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
6803 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
6804
6805 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
6806 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
6807 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
6808
6809 IEM_MC_FETCH_EFLAGS(EFlags);
6810 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6811 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
6812 else
6813 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
6814
6815 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
6816 IEM_MC_COMMIT_EFLAGS(EFlags);
6817 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
6818 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
6819 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
6820 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
6821 IEM_MC_ENDIF();
6822 IEM_MC_ADVANCE_RIP();
6823
6824 IEM_MC_END();
6825 return VINF_SUCCESS;
6826}
6827
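/*
 * Note: CMPXCHG8B compares EDX:EAX against the 64-bit memory operand.
 * Semantics sketch (illustrative; the worker sets ZF accordingly):
 *
 *      if (*pu64MemDst == u64EaxEdx.u)     // EDX:EAX == m64?
 *          *pu64MemDst = u64EbxEcx.u;      // yes: store ECX:EBX, ZF=1
 *      else
 *          u64EaxEdx.u = *pu64MemDst;      // no: load m64 into EDX:EAX, ZF=0
 */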
6828
6829/** Opcode REX.W 0x0f 0xc7 !11/1. */
6830FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
6831{
6832 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
6833 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
6834 {
6835#if 0
6836 RT_NOREF(bRm);
6837 IEMOP_BITCH_ABOUT_STUB();
6838 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6839#else
6840 IEM_MC_BEGIN(4, 3);
6841 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
6842 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
6843 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
6844 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
6845 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
6846 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
6847 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6848
6849 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6850 IEMOP_HLP_DONE_DECODING();
6851 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
6852 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6853
6854 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
6855 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
6856 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
6857
6858 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
6859 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
6860 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
6861
6862 IEM_MC_FETCH_EFLAGS(EFlags);
6863# ifdef RT_ARCH_AMD64
6864 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
6865 {
6866 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6867 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6868 else
6869 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6870 }
6871 else
6872# endif
6873 {
6874 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
6875 accesses and not all all atomic, which works fine on in UNI CPU guest
6876 configuration (ignoring DMA). If guest SMP is active we have no choice
6877 but to use a rendezvous callback here. Sigh. */
6878 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
6879 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6880 else
6881 {
6882 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6883 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
6884 }
6885 }
6886
6887 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
6888 IEM_MC_COMMIT_EFLAGS(EFlags);
6889 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
6890 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
6891 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
6892 IEM_MC_ENDIF();
6893 IEM_MC_ADVANCE_RIP();
6894
6895 IEM_MC_END();
6896 return VINF_SUCCESS;
6897#endif
6898 }
6899 Log(("cmpxchg16b -> #UD\n"));
6900 return IEMOP_RAISE_INVALID_OPCODE();
6901}
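
/* Note: unlike cmpxchg8b, cmpxchg16b requires its memory operand to be
   16-byte aligned or it raises #GP(0); that is what the
   IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16) statement above
   models. A sketch of the check, assuming plain pointer arithmetic:
       if (GCPtrEffDst & 15)
           return iemRaiseGeneralProtectionFault0(pVCpu);
*/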
6902
6903
6904/** Opcode 0x0f 0xc7 11/6. */
6905FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
6906
6907/** Opcode 0x0f 0xc7 !11/6. */
6908FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
6909
6910/** Opcode 0x66 0x0f 0xc7 !11/6. */
6911FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
6912
6913/** Opcode 0xf3 0x0f 0xc7 !11/6. */
6914FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
6915
6916/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
6917FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6918
6919
6920/** Opcode 0x0f 0xc7. */
6921FNIEMOP_DEF(iemOp_Grp9)
6922{
6923 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6924 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6925 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6926 {
6927 case 0: case 2: case 3: case 4: case 5:
6928 return IEMOP_RAISE_INVALID_OPCODE();
6929 case 1:
6930 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6931 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6932 || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6933 return IEMOP_RAISE_INVALID_OPCODE();
6934 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6935 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6936 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6937 case 6:
6938 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6939 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6940 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6941 {
6942 case 0:
6943 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6944 case IEM_OP_PRF_SIZE_OP:
6945 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6946 case IEM_OP_PRF_REPZ:
6947 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6948 default:
6949 return IEMOP_RAISE_INVALID_OPCODE();
6950 }
6951 case 7:
6952 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6953 {
6954 case 0:
6955 case IEM_OP_PRF_REPZ:
6956 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6957 default:
6958 return IEMOP_RAISE_INVALID_OPCODE();
6959 }
6960 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6961 }
6962}
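
/* Group opcodes such as 0x0f 0xc7 encode the actual instruction in the
   ModRM reg field; a sketch of the field extraction matching the
   X86_MODRM_* masks and shifts used above:
       mod = (bRm >> 6) & 3;   -- 3 = register operand, otherwise memory
       reg = (bRm >> 3) & 7;   -- selects the instruction within the group
       rm  =  bRm       & 7;   -- register or addressing-mode selector
*/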
6963
6964
6965/**
6966 * Common 'bswap register' helper.
6967 */
6968FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
6969{
6970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6971 switch (pVCpu->iem.s.enmEffOpSize)
6972 {
6973 case IEMMODE_16BIT:
6974 IEM_MC_BEGIN(1, 0);
6975 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6976 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
6977 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
6978 IEM_MC_ADVANCE_RIP();
6979 IEM_MC_END();
6980 return VINF_SUCCESS;
6981
6982 case IEMMODE_32BIT:
6983 IEM_MC_BEGIN(1, 0);
6984 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6985 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
6986 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6987 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
6988 IEM_MC_ADVANCE_RIP();
6989 IEM_MC_END();
6990 return VINF_SUCCESS;
6991
6992 case IEMMODE_64BIT:
6993 IEM_MC_BEGIN(1, 0);
6994 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6995 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
6996 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
6997 IEM_MC_ADVANCE_RIP();
6998 IEM_MC_END();
6999 return VINF_SUCCESS;
7000
7001 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7002 }
7003}
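
/* A sketch of what the iemAImpl_bswap_u32 worker computes, assuming plain C
   shifts; the 16-bit variant is documented as undefined by Intel, which is
   why the helper above routes it through a 32-bit reference without
   clearing the high dword. */
#if 0 /* illustrative sketch, not built */
static uint32_t iemSketchBSwapU32(uint32_t u32)
{
    return (u32 >> 24)
         | ((u32 >> 8) & UINT32_C(0x0000ff00))
         | ((u32 << 8) & UINT32_C(0x00ff0000))
         | (u32 << 24);
}
#endif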
7004
7005
7006/** Opcode 0x0f 0xc8. */
7007FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7008{
7009 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7010 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
7011 prefix, but it appears REX.B is actually the correct prefix. For a parallel
7012 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7013 IEMOP_HLP_MIN_486();
7014 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7015}
7016
7017
7018/** Opcode 0x0f 0xc9. */
7019FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7020{
7021 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7022 IEMOP_HLP_MIN_486();
7023 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7024}
7025
7026
7027/** Opcode 0x0f 0xca. */
7028FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7029{
7030 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7031 IEMOP_HLP_MIN_486();
7032 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7033}
7034
7035
7036/** Opcode 0x0f 0xcb. */
7037FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7038{
7039 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7040 IEMOP_HLP_MIN_486();
7041 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7042}
7043
7044
7045/** Opcode 0x0f 0xcc. */
7046FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7047{
7048 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7049 IEMOP_HLP_MIN_486();
7050 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7051}
7052
7053
7054/** Opcode 0x0f 0xcd. */
7055FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7056{
7057 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7058 IEMOP_HLP_MIN_486();
7059 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7060}
7061
7062
7063/** Opcode 0x0f 0xce. */
7064FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7065{
7066 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7067 IEMOP_HLP_MIN_486();
7068 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7069}
7070
7071
7072/** Opcode 0x0f 0xcf. */
7073FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7074{
7075 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7076 IEMOP_HLP_MIN_486();
7077 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7078}
7079
7080
7081
7082/** Opcode 0x0f 0xd0. */
7083FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
7084/** Opcode 0x0f 0xd1. */
7085FNIEMOP_STUB(iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq);
7086/** Opcode 0x0f 0xd2. */
7087FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
7088/** Opcode 0x0f 0xd3. */
7089FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
7090/** Opcode 0x0f 0xd4. */
7091FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
7092/** Opcode 0x0f 0xd5. */
7093FNIEMOP_STUB(iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq);
7094
7095/** Opcode 0x0f 0xd6. */
7096FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq);
7097#if 0
7098FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7099{
7100 /* Docs say register only. */
7101 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7102
7103 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7104 {
7105 case IEM_OP_PRF_SIZE_OP: /* SSE */
7106 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7107 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7108 IEM_MC_BEGIN(2, 0);
7109 IEM_MC_ARG(uint64_t *, pDst, 0);
7110 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7111 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7112 IEM_MC_PREPARE_SSE_USAGE();
7113 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7114 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7115 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7116 IEM_MC_ADVANCE_RIP();
7117 IEM_MC_END();
7118 return VINF_SUCCESS;
7119
7120 case 0: /* MMX */
7121 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7122 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7123 IEM_MC_BEGIN(2, 0);
7124 IEM_MC_ARG(uint64_t *, pDst, 0);
7125 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7126 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7127 IEM_MC_PREPARE_FPU_USAGE();
7128 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7129 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7130 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7131 IEM_MC_ADVANCE_RIP();
7132 IEM_MC_END();
7133 return VINF_SUCCESS;
7134
7135 default:
7136 return IEMOP_RAISE_INVALID_OPCODE();
7137 }
7138}
7139#endif
7140
7141
7142/** Opcode 0x0f 0xd7. */
7143FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
7144{
7145 /* Docs say register only. */
7146 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7147 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7148 return IEMOP_RAISE_INVALID_OPCODE();
7149
7150 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
7151 /** @todo testcase: Check that the instruction implicitly clears the high
7152 * bits in 64-bit mode. REX.W only becomes necessary once VLMAX > 256
7153 * and opcode modifications are made to work with the whole width (not
7154 * just 128). */
7155 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7156 {
7157 case IEM_OP_PRF_SIZE_OP: /* SSE */
7158 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7159 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7160 IEM_MC_BEGIN(2, 0);
7161 IEM_MC_ARG(uint64_t *, pDst, 0);
7162 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7163 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7164 IEM_MC_PREPARE_SSE_USAGE();
7165 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7166 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7167 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7168 IEM_MC_ADVANCE_RIP();
7169 IEM_MC_END();
7170 return VINF_SUCCESS;
7171
7172 case 0: /* MMX */
7173 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7174 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7175 IEM_MC_BEGIN(2, 0);
7176 IEM_MC_ARG(uint64_t *, pDst, 0);
7177 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7178 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7179 IEM_MC_PREPARE_FPU_USAGE();
7180 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7181 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7182 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7183 IEM_MC_ADVANCE_RIP();
7184 IEM_MC_END();
7185 return VINF_SUCCESS;
7186
7187 default:
7188 return IEMOP_RAISE_INVALID_OPCODE();
7189 }
7190}
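
/* A sketch of what pmovmskb computes, assuming standard MMX/SSE semantics:
   the destination receives a bit mask built from the most significant bit
   of each source byte. The real work is done by the assembly workers
   iemAImpl_pmovmskb_u64 and iemAImpl_pmovmskb_u128. */
#if 0 /* illustrative sketch, not built */
static uint32_t iemSketchPMovMskBU64(uint64_t uSrc)
{
    uint32_t fMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        fMask |= (uint32_t)((uSrc >> (iByte * 8 + 7)) & 1) << iByte; /* MSB of each byte. */
    return fMask;
}
#endif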
7191
7192
7193/** Opcode 0x0f 0xd8. */
7194FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
7195/** Opcode 0x0f 0xd9. */
7196FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
7197/** Opcode 0x0f 0xda. */
7198FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
7199/** Opcode 0x0f 0xdb. */
7200FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
7201/** Opcode 0x0f 0xdc. */
7202FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
7203/** Opcode 0x0f 0xdd. */
7204FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
7205/** Opcode 0x0f 0xde. */
7206FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq);
7207/** Opcode 0x0f 0xdf. */
7208FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
7209/** Opcode 0x0f 0xe0. */
7210FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
7211/** Opcode 0x0f 0xe1. */
7212FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
7213/** Opcode 0x0f 0xe2. */
7214FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
7215/** Opcode 0x0f 0xe3. */
7216FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
7217/** Opcode 0x0f 0xe4. */
7218FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
7219/** Opcode 0x0f 0xe5. */
7220FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
7221/** Opcode 0x0f 0xe6. */
7222FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
7223
7224
7225/** Opcode 0x0f 0xe7. */
7226FNIEMOP_DEF(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq)
7227{
7228 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7229 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7230 {
7231 /*
7232 * Register, memory.
7233 */
7234/** @todo check when the REPNZ/Z bits kick in. Same as lock, probably... */
7235 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7236 {
7237
7238 case IEM_OP_PRF_SIZE_OP: /* SSE */
7239 IEMOP_MNEMONIC(movntdq_Mdq_Vdq, "movntdq Mdq,Vdq");
7240 IEM_MC_BEGIN(0, 2);
7241 IEM_MC_LOCAL(uint128_t, uSrc);
7242 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7243
7244 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7246 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7247 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7248
7249 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7250 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7251
7252 IEM_MC_ADVANCE_RIP();
7253 IEM_MC_END();
7254 break;
7255
7256 case 0: /* MMX */
7257 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7258 IEM_MC_BEGIN(0, 2);
7259 IEM_MC_LOCAL(uint64_t, uSrc);
7260 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7261
7262 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7264 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7265 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7266
7267 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7268 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7269
7270 IEM_MC_ADVANCE_RIP();
7271 IEM_MC_END();
7272 break;
7273
7274 default:
7275 return IEMOP_RAISE_INVALID_OPCODE();
7276 }
7277 }
7278 /* The register, register encoding is invalid. */
7279 else
7280 return IEMOP_RAISE_INVALID_OPCODE();
7281 return VINF_SUCCESS;
7282}
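
/* Note: movntq/movntdq are non-temporal store hints; since the hint only
   affects cache allocation, IEM can implement them as ordinary stores, as
   the IEM_MC_STORE_MEM_* statements above do. The register form (mod == 3)
   is not defined for these instructions and raises #UD. */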
7283
7284
7285/** Opcode 0x0f 0xe8. */
7286FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
7287/** Opcode 0x0f 0xe9. */
7288FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
7289/** Opcode 0x0f 0xea. */
7290FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
7291/** Opcode 0x0f 0xeb. */
7292FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
7293/** Opcode 0x0f 0xec. */
7294FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
7295/** Opcode 0x0f 0xed. */
7296FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
7297/** Opcode 0x0f 0xee. */
7298FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
7299
7300
7301/** Opcode 0x0f 0xef. */
7302FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
7303{
7304 IEMOP_MNEMONIC(pxor, "pxor");
7305 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
7306}
7307
7308
7309/** Opcode 0x0f 0xf0. */
7310FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
7311/** Opcode 0x0f 0xf1. */
7312FNIEMOP_STUB(iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq);
7313/** Opcode 0x0f 0xf2. */
7314FNIEMOP_STUB(iemOp_psld_Pq_Qq__pslld_Vdq_Wdq);
7315/** Opcode 0x0f 0xf3. */
7316FNIEMOP_STUB(iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq);
7317/** Opcode 0x0f 0xf4. */
7318FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
7319/** Opcode 0x0f 0xf5. */
7320FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
7321/** Opcode 0x0f 0xf6. */
7322FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
7323/** Opcode 0x0f 0xf7. */
7324FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
7325/** Opcode 0x0f 0xf8. */
7326FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq); //NEXT
7327/** Opcode 0x0f 0xf9. */
7328FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
7329/** Opcode 0x0f 0xfa. */
7330FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
7331/** Opcode 0x0f 0xfb. */
7332FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq);
7333/** Opcode 0x0f 0xfc. */
7334FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
7335/** Opcode 0x0f 0xfd. */
7336FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
7337/** Opcode 0x0f 0xfe. */
7338FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
7339
7340
7341IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[256] =
7342{
7343 /* 0x00 */ iemOp_Grp6,
7344 /* 0x01 */ iemOp_Grp7,
7345 /* 0x02 */ iemOp_lar_Gv_Ew,
7346 /* 0x03 */ iemOp_lsl_Gv_Ew,
7347 /* 0x04 */ iemOp_Invalid,
7348 /* 0x05 */ iemOp_syscall,
7349 /* 0x06 */ iemOp_clts,
7350 /* 0x07 */ iemOp_sysret,
7351 /* 0x08 */ iemOp_invd,
7352 /* 0x09 */ iemOp_wbinvd,
7353 /* 0x0a */ iemOp_Invalid,
7354 /* 0x0b */ iemOp_ud2,
7355 /* 0x0c */ iemOp_Invalid,
7356 /* 0x0d */ iemOp_nop_Ev_GrpP,
7357 /* 0x0e */ iemOp_femms,
7358 /* 0x0f */ iemOp_3Dnow,
7359 /* 0x10 */ iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
7360 /* 0x11 */ iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
7361 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
7362 /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
7363 /* 0x14 */ iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
7364 /* 0x15 */ iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
7365 /* 0x16 */ iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
7366 /* 0x17 */ iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
7367 /* 0x18 */ iemOp_prefetch_Grp16,
7368 /* 0x19 */ iemOp_nop_Ev,
7369 /* 0x1a */ iemOp_nop_Ev,
7370 /* 0x1b */ iemOp_nop_Ev,
7371 /* 0x1c */ iemOp_nop_Ev,
7372 /* 0x1d */ iemOp_nop_Ev,
7373 /* 0x1e */ iemOp_nop_Ev,
7374 /* 0x1f */ iemOp_nop_Ev,
7375 /* 0x20 */ iemOp_mov_Rd_Cd,
7376 /* 0x21 */ iemOp_mov_Rd_Dd,
7377 /* 0x22 */ iemOp_mov_Cd_Rd,
7378 /* 0x23 */ iemOp_mov_Dd_Rd,
7379 /* 0x24 */ iemOp_mov_Rd_Td,
7380 /* 0x25 */ iemOp_Invalid,
7381 /* 0x26 */ iemOp_mov_Td_Rd,
7382 /* 0x27 */ iemOp_Invalid,
7383 /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
7384 /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
7385 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
7386 /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
7387 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
7388 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
7389 /* 0x2e */ iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
7390 /* 0x2f */ iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
7391 /* 0x30 */ iemOp_wrmsr,
7392 /* 0x31 */ iemOp_rdtsc,
7393 /* 0x32 */ iemOp_rdmsr,
7394 /* 0x33 */ iemOp_rdpmc,
7395 /* 0x34 */ iemOp_sysenter,
7396 /* 0x35 */ iemOp_sysexit,
7397 /* 0x36 */ iemOp_Invalid,
7398 /* 0x37 */ iemOp_getsec,
7399 /* 0x38 */ iemOp_3byte_Esc_A4,
7400 /* 0x39 */ iemOp_Invalid,
7401 /* 0x3a */ iemOp_3byte_Esc_A5,
7402 /* 0x3b */ iemOp_Invalid,
7403 /* 0x3c */ iemOp_Invalid,
7404 /* 0x3d */ iemOp_Invalid,
7405 /* 0x3e */ iemOp_Invalid,
7406 /* 0x3f */ iemOp_Invalid,
7407 /* 0x40 */ iemOp_cmovo_Gv_Ev,
7408 /* 0x41 */ iemOp_cmovno_Gv_Ev,
7409 /* 0x42 */ iemOp_cmovc_Gv_Ev,
7410 /* 0x43 */ iemOp_cmovnc_Gv_Ev,
7411 /* 0x44 */ iemOp_cmove_Gv_Ev,
7412 /* 0x45 */ iemOp_cmovne_Gv_Ev,
7413 /* 0x46 */ iemOp_cmovbe_Gv_Ev,
7414 /* 0x47 */ iemOp_cmovnbe_Gv_Ev,
7415 /* 0x48 */ iemOp_cmovs_Gv_Ev,
7416 /* 0x49 */ iemOp_cmovns_Gv_Ev,
7417 /* 0x4a */ iemOp_cmovp_Gv_Ev,
7418 /* 0x4b */ iemOp_cmovnp_Gv_Ev,
7419 /* 0x4c */ iemOp_cmovl_Gv_Ev,
7420 /* 0x4d */ iemOp_cmovnl_Gv_Ev,
7421 /* 0x4e */ iemOp_cmovle_Gv_Ev,
7422 /* 0x4f */ iemOp_cmovnle_Gv_Ev,
7423 /* 0x50 */ iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
7424 /* 0x51 */ iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
7425 /* 0x52 */ iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
7426 /* 0x53 */ iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
7427 /* 0x54 */ iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
7428 /* 0x55 */ iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
7429 /* 0x56 */ iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
7430 /* 0x57 */ iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
7431 /* 0x58 */ iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
7432 /* 0x59 */ iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
7433 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
7434 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
7435 /* 0x5c */ iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
7436 /* 0x5d */ iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
7437 /* 0x5e */ iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
7438 /* 0x5f */ iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
7439 /* 0x60 */ iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
7440 /* 0x61 */ iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
7441 /* 0x62 */ iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
7442 /* 0x63 */ iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
7443 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
7444 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
7445 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
7446 /* 0x67 */ iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
7447 /* 0x68 */ iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
7448 /* 0x69 */ iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
7449 /* 0x6a */ iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
7450 /* 0x6b */ iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
7451 /* 0x6c */ iemOp_punpcklqdq_Vdq_Wdq,
7452 /* 0x6d */ iemOp_punpckhqdq_Vdq_Wdq,
7453 /* 0x6e */ iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
7454 /* 0x6f */ iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
7455 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
7456 /* 0x71 */ iemOp_Grp12,
7457 /* 0x72 */ iemOp_Grp13,
7458 /* 0x73 */ iemOp_Grp14,
7459 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
7460 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
7461 /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
7462 /* 0x77 */ iemOp_emms,
7463 /* 0x78 */ iemOp_vmread_AmdGrp17,
7464 /* 0x79 */ iemOp_vmwrite,
7465 /* 0x7a */ iemOp_Invalid,
7466 /* 0x7b */ iemOp_Invalid,
7467 /* 0x7c */ iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
7468 /* 0x7d */ iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
7469 /* 0x7e */ iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
7470 /* 0x7f */ iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
7471 /* 0x80 */ iemOp_jo_Jv,
7472 /* 0x81 */ iemOp_jno_Jv,
7473 /* 0x82 */ iemOp_jc_Jv,
7474 /* 0x83 */ iemOp_jnc_Jv,
7475 /* 0x84 */ iemOp_je_Jv,
7476 /* 0x85 */ iemOp_jne_Jv,
7477 /* 0x86 */ iemOp_jbe_Jv,
7478 /* 0x87 */ iemOp_jnbe_Jv,
7479 /* 0x88 */ iemOp_js_Jv,
7480 /* 0x89 */ iemOp_jns_Jv,
7481 /* 0x8a */ iemOp_jp_Jv,
7482 /* 0x8b */ iemOp_jnp_Jv,
7483 /* 0x8c */ iemOp_jl_Jv,
7484 /* 0x8d */ iemOp_jnl_Jv,
7485 /* 0x8e */ iemOp_jle_Jv,
7486 /* 0x8f */ iemOp_jnle_Jv,
7487 /* 0x90 */ iemOp_seto_Eb,
7488 /* 0x91 */ iemOp_setno_Eb,
7489 /* 0x92 */ iemOp_setc_Eb,
7490 /* 0x93 */ iemOp_setnc_Eb,
7491 /* 0x94 */ iemOp_sete_Eb,
7492 /* 0x95 */ iemOp_setne_Eb,
7493 /* 0x96 */ iemOp_setbe_Eb,
7494 /* 0x97 */ iemOp_setnbe_Eb,
7495 /* 0x98 */ iemOp_sets_Eb,
7496 /* 0x99 */ iemOp_setns_Eb,
7497 /* 0x9a */ iemOp_setp_Eb,
7498 /* 0x9b */ iemOp_setnp_Eb,
7499 /* 0x9c */ iemOp_setl_Eb,
7500 /* 0x9d */ iemOp_setnl_Eb,
7501 /* 0x9e */ iemOp_setle_Eb,
7502 /* 0x9f */ iemOp_setnle_Eb,
7503 /* 0xa0 */ iemOp_push_fs,
7504 /* 0xa1 */ iemOp_pop_fs,
7505 /* 0xa2 */ iemOp_cpuid,
7506 /* 0xa3 */ iemOp_bt_Ev_Gv,
7507 /* 0xa4 */ iemOp_shld_Ev_Gv_Ib,
7508 /* 0xa5 */ iemOp_shld_Ev_Gv_CL,
7509 /* 0xa6 */ iemOp_Invalid,
7510 /* 0xa7 */ iemOp_Invalid,
7511 /* 0xa8 */ iemOp_push_gs,
7512 /* 0xa9 */ iemOp_pop_gs,
7513 /* 0xaa */ iemOp_rsm,
7514 /* 0xab */ iemOp_bts_Ev_Gv,
7515 /* 0xac */ iemOp_shrd_Ev_Gv_Ib,
7516 /* 0xad */ iemOp_shrd_Ev_Gv_CL,
7517 /* 0xae */ iemOp_Grp15,
7518 /* 0xaf */ iemOp_imul_Gv_Ev,
7519 /* 0xb0 */ iemOp_cmpxchg_Eb_Gb,
7520 /* 0xb1 */ iemOp_cmpxchg_Ev_Gv,
7521 /* 0xb2 */ iemOp_lss_Gv_Mp,
7522 /* 0xb3 */ iemOp_btr_Ev_Gv,
7523 /* 0xb4 */ iemOp_lfs_Gv_Mp,
7524 /* 0xb5 */ iemOp_lgs_Gv_Mp,
7525 /* 0xb6 */ iemOp_movzx_Gv_Eb,
7526 /* 0xb7 */ iemOp_movzx_Gv_Ew,
7527 /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,
7528 /* 0xb9 */ iemOp_Grp10,
7529 /* 0xba */ iemOp_Grp8,
7530 /* 0xbb */ iemOp_btc_Ev_Gv,
7531 /* 0xbc */ iemOp_bsf_Gv_Ev,
7532 /* 0xbd */ iemOp_bsr_Gv_Ev,
7533 /* 0xbe */ iemOp_movsx_Gv_Eb,
7534 /* 0xbf */ iemOp_movsx_Gv_Ew,
7535 /* 0xc0 */ iemOp_xadd_Eb_Gb,
7536 /* 0xc1 */ iemOp_xadd_Ev_Gv,
7537 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
7538 /* 0xc3 */ iemOp_movnti_My_Gy,
7539 /* 0xc4 */ iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
7540 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
7541 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
7542 /* 0xc7 */ iemOp_Grp9,
7543 /* 0xc8 */ iemOp_bswap_rAX_r8,
7544 /* 0xc9 */ iemOp_bswap_rCX_r9,
7545 /* 0xca */ iemOp_bswap_rDX_r10,
7546 /* 0xcb */ iemOp_bswap_rBX_r11,
7547 /* 0xcc */ iemOp_bswap_rSP_r12,
7548 /* 0xcd */ iemOp_bswap_rBP_r13,
7549 /* 0xce */ iemOp_bswap_rSI_r14,
7550 /* 0xcf */ iemOp_bswap_rDI_r15,
7551 /* 0xd0 */ iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
7552 /* 0xd1 */ iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq,
7553 /* 0xd2 */ iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
7554 /* 0xd3 */ iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
7555 /* 0xd4 */ iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
7556 /* 0xd5 */ iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq,
7557 /* 0xd6 */ iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
7558 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
7559 /* 0xd8 */ iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
7560 /* 0xd9 */ iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
7561 /* 0xda */ iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
7562 /* 0xdb */ iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
7563 /* 0xdc */ iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
7564 /* 0xdd */ iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
7565 /* 0xde */ iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq,
7566 /* 0xdf */ iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
7567 /* 0xe0 */ iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
7568 /* 0xe1 */ iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
7569 /* 0xe2 */ iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
7570 /* 0xe3 */ iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
7571 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
7572 /* 0xe5 */ iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
7573 /* 0xe6 */ iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
7574 /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
7575 /* 0xe8 */ iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
7576 /* 0xe9 */ iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
7577 /* 0xea */ iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
7578 /* 0xeb */ iemOp_por_Pq_Qq__por_Vdq_Wdq,
7579 /* 0xec */ iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
7580 /* 0xed */ iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
7581 /* 0xee */ iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
7582 /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
7583 /* 0xf0 */ iemOp_lddqu_Vdq_Mdq,
7584 /* 0xf1 */ iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq,
7585 /* 0xf2 */ iemOp_psld_Pq_Qq__pslld_Vdq_Wdq,
7586 /* 0xf3 */ iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq,
7587 /* 0xf4 */ iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
7588 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
7589 /* 0xf6 */ iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
7590 /* 0xf7 */ iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
7591 /* 0xf8 */ iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
7592 /* 0xf9 */ iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
7593 /* 0xfa */ iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
7594 /* 0xfb */ iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq,
7595 /* 0xfc */ iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
7596 /* 0xfd */ iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
7597 /* 0xfe */ iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
7598 /* 0xff */ iemOp_Invalid
7599};
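
/* This table is indexed directly by the opcode byte that follows the 0x0f
   escape (see iemOp_2byteEscape further down); every slot must therefore be
   populated, with iemOp_Invalid covering the undefined encodings. */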
7600
7601/** @} */
7602
7603
7604/** @name One byte opcodes.
7605 *
7606 * @{
7607 */
7608
7609/** Opcode 0x00. */
7610FNIEMOP_DEF(iemOp_add_Eb_Gb)
7611{
7612 IEMOP_MNEMONIC(add_Eb_Gb, "add Eb,Gb");
7613 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
7614}
7615
7616
7617/** Opcode 0x01. */
7618FNIEMOP_DEF(iemOp_add_Ev_Gv)
7619{
7620 IEMOP_MNEMONIC(add_Ev_Gv, "add Ev,Gv");
7621 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
7622}
7623
7624
7625/** Opcode 0x02. */
7626FNIEMOP_DEF(iemOp_add_Gb_Eb)
7627{
7628 IEMOP_MNEMONIC(add_Gb_Eb, "add Gb,Eb");
7629 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
7630}
7631
7632
7633/** Opcode 0x03. */
7634FNIEMOP_DEF(iemOp_add_Gv_Ev)
7635{
7636 IEMOP_MNEMONIC(add_Gv_Ev, "add Gv,Ev");
7637 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
7638}
7639
7640
7641/** Opcode 0x04. */
7642FNIEMOP_DEF(iemOp_add_Al_Ib)
7643{
7644 IEMOP_MNEMONIC(add_al_Ib, "add al,Ib");
7645 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
7646}
7647
7648
7649/** Opcode 0x05. */
7650FNIEMOP_DEF(iemOp_add_eAX_Iz)
7651{
7652 IEMOP_MNEMONIC(add_rAX_Iz, "add rAX,Iz");
7653 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
7654}
7655
7656
7657/** Opcode 0x06. */
7658FNIEMOP_DEF(iemOp_push_ES)
7659{
7660 IEMOP_MNEMONIC(push_es, "push es");
7661 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
7662}
7663
7664
7665/** Opcode 0x07. */
7666FNIEMOP_DEF(iemOp_pop_ES)
7667{
7668 IEMOP_MNEMONIC(pop_es, "pop es");
7669 IEMOP_HLP_NO_64BIT();
7670 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7671 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
7672}
7673
7674
7675/** Opcode 0x08. */
7676FNIEMOP_DEF(iemOp_or_Eb_Gb)
7677{
7678 IEMOP_MNEMONIC(or_Eb_Gb, "or Eb,Gb");
7679 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7680 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
7681}
7682
7683
7684/** Opcode 0x09. */
7685FNIEMOP_DEF(iemOp_or_Ev_Gv)
7686{
7687 IEMOP_MNEMONIC(or_Ev_Gv, "or Ev,Gv");
7688 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7689 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
7690}
7691
7692
7693/** Opcode 0x0a. */
7694FNIEMOP_DEF(iemOp_or_Gb_Eb)
7695{
7696 IEMOP_MNEMONIC(or_Gb_Eb, "or Gb,Eb");
7697 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7698 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
7699}
7700
7701
7702/** Opcode 0x0b. */
7703FNIEMOP_DEF(iemOp_or_Gv_Ev)
7704{
7705 IEMOP_MNEMONIC(or_Gv_Ev, "or Gv,Ev");
7706 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7707 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
7708}
7709
7710
7711/** Opcode 0x0c. */
7712FNIEMOP_DEF(iemOp_or_Al_Ib)
7713{
7714 IEMOP_MNEMONIC(or_al_Ib, "or al,Ib");
7715 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7716 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
7717}
7718
7719
7720/** Opcode 0x0d. */
7721FNIEMOP_DEF(iemOp_or_eAX_Iz)
7722{
7723 IEMOP_MNEMONIC(or_rAX_Iz, "or rAX,Iz");
7724 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7725 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
7726}
7727
7728
7729/** Opcode 0x0e. */
7730FNIEMOP_DEF(iemOp_push_CS)
7731{
7732 IEMOP_MNEMONIC(push_cs, "push cs");
7733 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
7734}
7735
7736
7737/** Opcode 0x0f. */
7738FNIEMOP_DEF(iemOp_2byteEscape)
7739{
7740 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7741 /** @todo PUSH CS on 8086, undefined on 80186. */
7742 IEMOP_HLP_MIN_286();
7743 return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
7744}
7745
7746/** Opcode 0x10. */
7747FNIEMOP_DEF(iemOp_adc_Eb_Gb)
7748{
7749 IEMOP_MNEMONIC(adc_Eb_Gb, "adc Eb,Gb");
7750 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
7751}
7752
7753
7754/** Opcode 0x11. */
7755FNIEMOP_DEF(iemOp_adc_Ev_Gv)
7756{
7757 IEMOP_MNEMONIC(adc_Ev_Gv, "adc Ev,Gv");
7758 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
7759}
7760
7761
7762/** Opcode 0x12. */
7763FNIEMOP_DEF(iemOp_adc_Gb_Eb)
7764{
7765 IEMOP_MNEMONIC(adc_Gb_Eb, "adc Gb,Eb");
7766 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
7767}
7768
7769
7770/** Opcode 0x13. */
7771FNIEMOP_DEF(iemOp_adc_Gv_Ev)
7772{
7773 IEMOP_MNEMONIC(adc_Gv_Ev, "adc Gv,Ev");
7774 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
7775}
7776
7777
7778/** Opcode 0x14. */
7779FNIEMOP_DEF(iemOp_adc_Al_Ib)
7780{
7781 IEMOP_MNEMONIC(adc_al_Ib, "adc al,Ib");
7782 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
7783}
7784
7785
7786/** Opcode 0x15. */
7787FNIEMOP_DEF(iemOp_adc_eAX_Iz)
7788{
7789 IEMOP_MNEMONIC(adc_rAX_Iz, "adc rAX,Iz");
7790 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
7791}
7792
7793
7794/** Opcode 0x16. */
7795FNIEMOP_DEF(iemOp_push_SS)
7796{
7797 IEMOP_MNEMONIC(push_ss, "push ss");
7798 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
7799}
7800
7801
7802/** Opcode 0x17. */
7803FNIEMOP_DEF(iemOp_pop_SS)
7804{
7805 IEMOP_MNEMONIC(pop_ss, "pop ss"); /** @todo implies instruction fusing? */
7806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7807 IEMOP_HLP_NO_64BIT();
7808 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
7809}
7810
7811
7812/** Opcode 0x18. */
7813FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
7814{
7815 IEMOP_MNEMONIC(sbb_Eb_Gb, "sbb Eb,Gb");
7816 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
7817}
7818
7819
7820/** Opcode 0x19. */
7821FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
7822{
7823 IEMOP_MNEMONIC(sbb_Ev_Gv, "sbb Ev,Gv");
7824 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
7825}
7826
7827
7828/** Opcode 0x1a. */
7829FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
7830{
7831 IEMOP_MNEMONIC(sbb_Gb_Eb, "sbb Gb,Eb");
7832 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
7833}
7834
7835
7836/** Opcode 0x1b. */
7837FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
7838{
7839 IEMOP_MNEMONIC(sbb_Gv_Ev, "sbb Gv,Ev");
7840 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
7841}
7842
7843
7844/** Opcode 0x1c. */
7845FNIEMOP_DEF(iemOp_sbb_Al_Ib)
7846{
7847 IEMOP_MNEMONIC(sbb_al_Ib, "sbb al,Ib");
7848 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
7849}
7850
7851
7852/** Opcode 0x1d. */
7853FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
7854{
7855 IEMOP_MNEMONIC(sbb_rAX_Iz, "sbb rAX,Iz");
7856 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
7857}
7858
7859
7860/** Opcode 0x1e. */
7861FNIEMOP_DEF(iemOp_push_DS)
7862{
7863 IEMOP_MNEMONIC(push_ds, "push ds");
7864 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
7865}
7866
7867
7868/** Opcode 0x1f. */
7869FNIEMOP_DEF(iemOp_pop_DS)
7870{
7871 IEMOP_MNEMONIC(pop_ds, "pop ds");
7872 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7873 IEMOP_HLP_NO_64BIT();
7874 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
7875}
7876
7877
7878/** Opcode 0x20. */
7879FNIEMOP_DEF(iemOp_and_Eb_Gb)
7880{
7881 IEMOP_MNEMONIC(and_Eb_Gb, "and Eb,Gb");
7882 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7883 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
7884}
7885
7886
7887/** Opcode 0x21. */
7888FNIEMOP_DEF(iemOp_and_Ev_Gv)
7889{
7890 IEMOP_MNEMONIC(and_Ev_Gv, "and Ev,Gv");
7891 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7892 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
7893}
7894
7895
7896/** Opcode 0x22. */
7897FNIEMOP_DEF(iemOp_and_Gb_Eb)
7898{
7899 IEMOP_MNEMONIC(and_Gb_Eb, "and Gb,Eb");
7900 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7901 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
7902}
7903
7904
7905/** Opcode 0x23. */
7906FNIEMOP_DEF(iemOp_and_Gv_Ev)
7907{
7908 IEMOP_MNEMONIC(and_Gv_Ev, "and Gv,Ev");
7909 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7910 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
7911}
7912
7913
7914/** Opcode 0x24. */
7915FNIEMOP_DEF(iemOp_and_Al_Ib)
7916{
7917 IEMOP_MNEMONIC(and_al_Ib, "and al,Ib");
7918 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7919 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
7920}
7921
7922
7923/** Opcode 0x25. */
7924FNIEMOP_DEF(iemOp_and_eAX_Iz)
7925{
7926 IEMOP_MNEMONIC(and_rAX_Iz, "and rAX,Iz");
7927 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7928 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
7929}
7930
7931
7932/** Opcode 0x26. */
7933FNIEMOP_DEF(iemOp_seg_ES)
7934{
7935 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
7936 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
7937 pVCpu->iem.s.iEffSeg = X86_SREG_ES;
7938
7939 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7940 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7941}
7942
7943
7944/** Opcode 0x27. */
7945FNIEMOP_DEF(iemOp_daa)
7946{
7947 IEMOP_MNEMONIC(daa_AL, "daa AL");
7948 IEMOP_HLP_NO_64BIT();
7949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7950 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
7951 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
7952}
7953
7954
7955/** Opcode 0x28. */
7956FNIEMOP_DEF(iemOp_sub_Eb_Gb)
7957{
7958 IEMOP_MNEMONIC(sub_Eb_Gb, "sub Eb,Gb");
7959 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
7960}
7961
7962
7963/** Opcode 0x29. */
7964FNIEMOP_DEF(iemOp_sub_Ev_Gv)
7965{
7966 IEMOP_MNEMONIC(sub_Ev_Gv, "sub Ev,Gv");
7967 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
7968}
7969
7970
7971/** Opcode 0x2a. */
7972FNIEMOP_DEF(iemOp_sub_Gb_Eb)
7973{
7974 IEMOP_MNEMONIC(sub_Gb_Eb, "sub Gb,Eb");
7975 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
7976}
7977
7978
7979/** Opcode 0x2b. */
7980FNIEMOP_DEF(iemOp_sub_Gv_Ev)
7981{
7982 IEMOP_MNEMONIC(sub_Gv_Ev, "sub Gv,Ev");
7983 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
7984}
7985
7986
7987/** Opcode 0x2c. */
7988FNIEMOP_DEF(iemOp_sub_Al_Ib)
7989{
7990 IEMOP_MNEMONIC(sub_al_Ib, "sub al,Ib");
7991 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
7992}
7993
7994
7995/** Opcode 0x2d. */
7996FNIEMOP_DEF(iemOp_sub_eAX_Iz)
7997{
7998 IEMOP_MNEMONIC(sub_rAX_Iz, "sub rAX,Iz");
7999 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
8000}
8001
8002
8003/** Opcode 0x2e. */
8004FNIEMOP_DEF(iemOp_seg_CS)
8005{
8006 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
8007 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
8008 pVCpu->iem.s.iEffSeg = X86_SREG_CS;
8009
8010 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8011 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8012}
8013
8014
8015/** Opcode 0x2f. */
8016FNIEMOP_DEF(iemOp_das)
8017{
8018 IEMOP_MNEMONIC(das_AL, "das AL");
8019 IEMOP_HLP_NO_64BIT();
8020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8021 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8022 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
8023}
8024
8025
8026/** Opcode 0x30. */
8027FNIEMOP_DEF(iemOp_xor_Eb_Gb)
8028{
8029 IEMOP_MNEMONIC(xor_Eb_Gb, "xor Eb,Gb");
8030 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8031 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
8032}
8033
8034
8035/** Opcode 0x31. */
8036FNIEMOP_DEF(iemOp_xor_Ev_Gv)
8037{
8038 IEMOP_MNEMONIC(xor_Ev_Gv, "xor Ev,Gv");
8039 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8040 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
8041}
8042
8043
8044/** Opcode 0x32. */
8045FNIEMOP_DEF(iemOp_xor_Gb_Eb)
8046{
8047 IEMOP_MNEMONIC(xor_Gb_Eb, "xor Gb,Eb");
8048 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8049 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
8050}
8051
8052
8053/** Opcode 0x33. */
8054FNIEMOP_DEF(iemOp_xor_Gv_Ev)
8055{
8056 IEMOP_MNEMONIC(xor_Gv_Ev, "xor Gv,Ev");
8057 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8058 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
8059}
8060
8061
8062/** Opcode 0x34. */
8063FNIEMOP_DEF(iemOp_xor_Al_Ib)
8064{
8065 IEMOP_MNEMONIC(xor_al_Ib, "xor al,Ib");
8066 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8067 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
8068}
8069
8070
8071/** Opcode 0x35. */
8072FNIEMOP_DEF(iemOp_xor_eAX_Iz)
8073{
8074 IEMOP_MNEMONIC(xor_rAX_Iz, "xor rAX,Iz");
8075 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8076 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
8077}
8078
8079
8080/** Opcode 0x36. */
8081FNIEMOP_DEF(iemOp_seg_SS)
8082{
8083 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
8084 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
8085 pVCpu->iem.s.iEffSeg = X86_SREG_SS;
8086
8087 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8088 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8089}
8090
8091
8092/** Opcode 0x37. */
8093FNIEMOP_STUB(iemOp_aaa);
8094
8095
8096/** Opcode 0x38. */
8097FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
8098{
8099 IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
8100 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
8101}
8102
8103
8104/** Opcode 0x39. */
8105FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
8106{
8107 IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
8108 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
8109}
8110
8111
8112/** Opcode 0x3a. */
8113FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
8114{
8115 IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
8116 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
8117}
8118
8119
8120/** Opcode 0x3b. */
8121FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
8122{
8123 IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
8124 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
8125}
8126
8127
8128/** Opcode 0x3c. */
8129FNIEMOP_DEF(iemOp_cmp_Al_Ib)
8130{
8131 IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
8132 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
8133}
8134
8135
8136/** Opcode 0x3d. */
8137FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
8138{
8139 IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
8140 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
8141}
8142
8143
8144/** Opcode 0x3e. */
8145FNIEMOP_DEF(iemOp_seg_DS)
8146{
8147 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
8148 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
8149 pVCpu->iem.s.iEffSeg = X86_SREG_DS;
8150
8151 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8152 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8153}
8154
8155
8156/** Opcode 0x3f. */
8157FNIEMOP_STUB(iemOp_aas);
8158
8159/**
8160 * Common 'inc/dec/not/neg register' helper.
8161 */
8162FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
8163{
8164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8165 switch (pVCpu->iem.s.enmEffOpSize)
8166 {
8167 case IEMMODE_16BIT:
8168 IEM_MC_BEGIN(2, 0);
8169 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8170 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8171 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
8172 IEM_MC_REF_EFLAGS(pEFlags);
8173 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
8174 IEM_MC_ADVANCE_RIP();
8175 IEM_MC_END();
8176 return VINF_SUCCESS;
8177
8178 case IEMMODE_32BIT:
8179 IEM_MC_BEGIN(2, 0);
8180 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8181 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8182 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8183 IEM_MC_REF_EFLAGS(pEFlags);
8184 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
8185 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8186 IEM_MC_ADVANCE_RIP();
8187 IEM_MC_END();
8188 return VINF_SUCCESS;
8189
8190 case IEMMODE_64BIT:
8191 IEM_MC_BEGIN(2, 0);
8192 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8193 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8194 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8195 IEM_MC_REF_EFLAGS(pEFlags);
8196 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
8197 IEM_MC_ADVANCE_RIP();
8198 IEM_MC_END();
8199 return VINF_SUCCESS;
8200 }
8201 return VINF_SUCCESS;
8202}
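
/* Note: in 64-bit mode a 32-bit register write zero-extends into the whole
   64-bit register, while 8- and 16-bit writes merge into it; that is why
   only the IEMMODE_32BIT case above needs IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF.
   A sketch, assuming a plain C model of a general register:
       u64Reg = uResult32;                                  -- 32-bit: zero-extend
       u64Reg = (u64Reg & ~UINT64_C(0xffff)) | uResult16;   -- 16-bit: merge
*/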
8203
8204
8205/** Opcode 0x40. */
8206FNIEMOP_DEF(iemOp_inc_eAX)
8207{
8208 /*
8209 * This is a REX prefix in 64-bit mode.
8210 */
8211 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8212 {
8213 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
8214 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;
8215
8216 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8217 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8218 }
8219
8220 IEMOP_MNEMONIC(inc_eAX, "inc eAX");
8221 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
8222}
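
/* Opcodes 0x40 through 0x4f double as REX prefixes in 64-bit mode; the low
   nibble carries the four REX bits (0100WRXB):
       W (bit 3): selects 64-bit operand size
       R (bit 2): extends the ModRM reg field
       X (bit 1): extends the SIB index field
       B (bit 0): extends ModRM rm, the SIB base, or the opcode register
   This is why the fifteen handlers that follow set uRexReg, uRexIndex and
   uRexB to (1 << 3) as appropriate before re-dispatching on the next
   opcode byte. */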
8223
8224
8225/** Opcode 0x41. */
8226FNIEMOP_DEF(iemOp_inc_eCX)
8227{
8228 /*
8229 * This is a REX prefix in 64-bit mode.
8230 */
8231 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8232 {
8233 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
8234 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
8235 pVCpu->iem.s.uRexB = 1 << 3;
8236
8237 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8238 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8239 }
8240
8241 IEMOP_MNEMONIC(inc_eCX, "inc eCX");
8242 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
8243}
8244
8245
8246/** Opcode 0x42. */
8247FNIEMOP_DEF(iemOp_inc_eDX)
8248{
8249 /*
8250 * This is a REX prefix in 64-bit mode.
8251 */
8252 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8253 {
8254 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
8255 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
8256 pVCpu->iem.s.uRexIndex = 1 << 3;
8257
8258 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8259 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8260 }
8261
8262 IEMOP_MNEMONIC(inc_eDX, "inc eDX");
8263 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
8264}
8265
8266
8267
8268/** Opcode 0x43. */
8269FNIEMOP_DEF(iemOp_inc_eBX)
8270{
8271 /*
8272 * This is a REX prefix in 64-bit mode.
8273 */
8274 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8275 {
8276 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
8277 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
8278 pVCpu->iem.s.uRexB = 1 << 3;
8279 pVCpu->iem.s.uRexIndex = 1 << 3;
8280
8281 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8282 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8283 }
8284
8285 IEMOP_MNEMONIC(inc_eBX, "inc eBX");
8286 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
8287}
8288
8289
8290/** Opcode 0x44. */
8291FNIEMOP_DEF(iemOp_inc_eSP)
8292{
8293 /*
8294 * This is a REX prefix in 64-bit mode.
8295 */
8296 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8297 {
8298 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
8299 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
8300 pVCpu->iem.s.uRexReg = 1 << 3;
8301
8302 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8303 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8304 }
8305
8306 IEMOP_MNEMONIC(inc_eSP, "inc eSP");
8307 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
8308}
8309
8310
8311/** Opcode 0x45. */
8312FNIEMOP_DEF(iemOp_inc_eBP)
8313{
8314 /*
8315 * This is a REX prefix in 64-bit mode.
8316 */
8317 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8318 {
8319 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
8320 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
8321 pVCpu->iem.s.uRexReg = 1 << 3;
8322 pVCpu->iem.s.uRexB = 1 << 3;
8323
8324 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8325 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8326 }
8327
8328 IEMOP_MNEMONIC(inc_eBP, "inc eBP");
8329 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
8330}
8331
8332
8333/** Opcode 0x46. */
8334FNIEMOP_DEF(iemOp_inc_eSI)
8335{
8336 /*
8337 * This is a REX prefix in 64-bit mode.
8338 */
8339 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8340 {
8341 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
8342 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
8343 pVCpu->iem.s.uRexReg = 1 << 3;
8344 pVCpu->iem.s.uRexIndex = 1 << 3;
8345
8346 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8347 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8348 }
8349
8350 IEMOP_MNEMONIC(inc_eSI, "inc eSI");
8351 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
8352}
8353
8354
8355/** Opcode 0x47. */
8356FNIEMOP_DEF(iemOp_inc_eDI)
8357{
8358 /*
8359 * This is a REX prefix in 64-bit mode.
8360 */
8361 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8362 {
8363 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
8364 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
8365 pVCpu->iem.s.uRexReg = 1 << 3;
8366 pVCpu->iem.s.uRexB = 1 << 3;
8367 pVCpu->iem.s.uRexIndex = 1 << 3;
8368
8369 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8370 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8371 }
8372
8373 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
8374 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
8375}
8376
8377
8378/** Opcode 0x48. */
8379FNIEMOP_DEF(iemOp_dec_eAX)
8380{
8381 /*
8382 * This is a REX prefix in 64-bit mode.
8383 */
8384 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8385 {
8386 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
8387 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
8388 iemRecalEffOpSize(pVCpu);
8389
8390 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8391 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8392 }
8393
8394 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
8395 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
8396}
8397
8398
8399/** Opcode 0x49. */
8400FNIEMOP_DEF(iemOp_dec_eCX)
8401{
8402 /*
8403 * This is a REX prefix in 64-bit mode.
8404 */
8405 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8406 {
8407 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
8408 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
8409 pVCpu->iem.s.uRexB = 1 << 3;
8410 iemRecalEffOpSize(pVCpu);
8411
8412 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8413 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8414 }
8415
8416 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
8417 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
8418}
8419
8420
8421/** Opcode 0x4a. */
8422FNIEMOP_DEF(iemOp_dec_eDX)
8423{
8424 /*
8425 * This is a REX prefix in 64-bit mode.
8426 */
8427 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8428 {
8429 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
8430 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8431 pVCpu->iem.s.uRexIndex = 1 << 3;
8432 iemRecalEffOpSize(pVCpu);
8433
8434 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8435 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8436 }
8437
8438 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
8439 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
8440}
8441
8442
8443/** Opcode 0x4b. */
8444FNIEMOP_DEF(iemOp_dec_eBX)
8445{
8446 /*
8447 * This is a REX prefix in 64-bit mode.
8448 */
8449 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8450 {
8451 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
8452 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8453 pVCpu->iem.s.uRexB = 1 << 3;
8454 pVCpu->iem.s.uRexIndex = 1 << 3;
8455 iemRecalEffOpSize(pVCpu);
8456
8457 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8458 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8459 }
8460
8461 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
8462 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
8463}
8464
8465
8466/** Opcode 0x4c. */
8467FNIEMOP_DEF(iemOp_dec_eSP)
8468{
8469 /*
8470 * This is a REX prefix in 64-bit mode.
8471 */
8472 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8473 {
8474 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
8475 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
8476 pVCpu->iem.s.uRexReg = 1 << 3;
8477 iemRecalEffOpSize(pVCpu);
8478
8479 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8480 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8481 }
8482
8483 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
8484 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
8485}
8486
8487
8488/** Opcode 0x4d. */
8489FNIEMOP_DEF(iemOp_dec_eBP)
8490{
8491 /*
8492 * This is a REX prefix in 64-bit mode.
8493 */
8494 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8495 {
8496 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
8497 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
8498 pVCpu->iem.s.uRexReg = 1 << 3;
8499 pVCpu->iem.s.uRexB = 1 << 3;
8500 iemRecalEffOpSize(pVCpu);
8501
8502 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8503 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8504 }
8505
8506 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
8507 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
8508}
8509
8510
8511/** Opcode 0x4e. */
8512FNIEMOP_DEF(iemOp_dec_eSI)
8513{
8514 /*
8515 * This is a REX prefix in 64-bit mode.
8516 */
8517 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8518 {
8519 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
8520 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8521 pVCpu->iem.s.uRexReg = 1 << 3;
8522 pVCpu->iem.s.uRexIndex = 1 << 3;
8523 iemRecalEffOpSize(pVCpu);
8524
8525 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8526 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8527 }
8528
8529 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
8530 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
8531}
8532
8533
8534/** Opcode 0x4f. */
8535FNIEMOP_DEF(iemOp_dec_eDI)
8536{
8537 /*
8538 * This is a REX prefix in 64-bit mode.
8539 */
8540 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8541 {
8542 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
8543 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8544 pVCpu->iem.s.uRexReg = 1 << 3;
8545 pVCpu->iem.s.uRexB = 1 << 3;
8546 pVCpu->iem.s.uRexIndex = 1 << 3;
8547 iemRecalEffOpSize(pVCpu);
8548
8549 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8550 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8551 }
8552
8553 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
8554 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
8555}
8556
8557
8558/**
8559 * Common 'push register' helper.
8560 */
8561FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
8562{
8563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8564 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8565 {
8566 iReg |= pVCpu->iem.s.uRexB;
8567 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
8568 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8569 }
8570
8571 switch (pVCpu->iem.s.enmEffOpSize)
8572 {
8573 case IEMMODE_16BIT:
8574 IEM_MC_BEGIN(0, 1);
8575 IEM_MC_LOCAL(uint16_t, u16Value);
8576 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
8577 IEM_MC_PUSH_U16(u16Value);
8578 IEM_MC_ADVANCE_RIP();
8579 IEM_MC_END();
8580 break;
8581
8582 case IEMMODE_32BIT:
8583 IEM_MC_BEGIN(0, 1);
8584 IEM_MC_LOCAL(uint32_t, u32Value);
8585 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
8586 IEM_MC_PUSH_U32(u32Value);
8587 IEM_MC_ADVANCE_RIP();
8588 IEM_MC_END();
8589 break;
8590
8591 case IEMMODE_64BIT:
8592 IEM_MC_BEGIN(0, 1);
8593 IEM_MC_LOCAL(uint64_t, u64Value);
8594 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
8595 IEM_MC_PUSH_U64(u64Value);
8596 IEM_MC_ADVANCE_RIP();
8597 IEM_MC_END();
8598 break;
8599 }
8600
8601 return VINF_SUCCESS;
8602}
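
/* Note: pushes default to 64-bit operand size in long mode and cannot be
   encoded with a 32-bit operand there; only a 0x66 prefix can shrink them
   to 16 bits, which is what the enmEffOpSize selection above expresses.
   A sketch of the stack update, assuming a flat SS:
       rSP -= cb;                 -- cb = 2 or 8 in 64-bit mode
       write(SS:rSP, value, cb);
*/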
8603
8604
8605/** Opcode 0x50. */
8606FNIEMOP_DEF(iemOp_push_eAX)
8607{
8608 IEMOP_MNEMONIC(push_rAX, "push rAX");
8609 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
8610}
8611
8612
8613/** Opcode 0x51. */
8614FNIEMOP_DEF(iemOp_push_eCX)
8615{
8616 IEMOP_MNEMONIC(push_rCX, "push rCX");
8617 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
8618}
8619
8620
8621/** Opcode 0x52. */
8622FNIEMOP_DEF(iemOp_push_eDX)
8623{
8624 IEMOP_MNEMONIC(push_rDX, "push rDX");
8625 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
8626}
8627
8628
8629/** Opcode 0x53. */
8630FNIEMOP_DEF(iemOp_push_eBX)
8631{
8632 IEMOP_MNEMONIC(push_rBX, "push rBX");
8633 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
8634}
8635
8636
8637/** Opcode 0x54. */
8638FNIEMOP_DEF(iemOp_push_eSP)
8639{
8640 IEMOP_MNEMONIC(push_rSP, "push rSP");
8641 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
8642 {
8643 IEM_MC_BEGIN(0, 1);
8644 IEM_MC_LOCAL(uint16_t, u16Value);
8645 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
8646 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
8647 IEM_MC_PUSH_U16(u16Value);
8648 IEM_MC_ADVANCE_RIP();
8649 IEM_MC_END();
8650 }
8651 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
8652}
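
/* Note: the IEMTARGETCPU_8086 special case above models the 8086 quirk
   where 'push sp' stores the value of SP after the decrement, hence the
   IEM_MC_SUB_LOCAL_U16(u16Value, 2) before the push; 80286 and later CPUs
   push the pre-decrement value via the common helper. */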
8653
8654
8655/** Opcode 0x55. */
8656FNIEMOP_DEF(iemOp_push_eBP)
8657{
8658 IEMOP_MNEMONIC(push_rBP, "push rBP");
8659 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
8660}
8661
8662
8663/** Opcode 0x56. */
8664FNIEMOP_DEF(iemOp_push_eSI)
8665{
8666 IEMOP_MNEMONIC(push_rSI, "push rSI");
8667 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
8668}
8669
8670
8671/** Opcode 0x57. */
8672FNIEMOP_DEF(iemOp_push_eDI)
8673{
8674 IEMOP_MNEMONIC(push_rDI, "push rDI");
8675 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
8676}
8677
8678
8679/**
8680 * Common 'pop register' helper.
8681 */
8682FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
8683{
8684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8685 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8686 {
8687 iReg |= pVCpu->iem.s.uRexB;
8688 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
8689 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8690 }
8691
8692 switch (pVCpu->iem.s.enmEffOpSize)
8693 {
8694 case IEMMODE_16BIT:
8695 IEM_MC_BEGIN(0, 1);
8696 IEM_MC_LOCAL(uint16_t *, pu16Dst);
8697 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
8698 IEM_MC_POP_U16(pu16Dst);
8699 IEM_MC_ADVANCE_RIP();
8700 IEM_MC_END();
8701 break;
8702
8703 case IEMMODE_32BIT:
8704 IEM_MC_BEGIN(0, 1);
8705 IEM_MC_LOCAL(uint32_t *, pu32Dst);
8706 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8707 IEM_MC_POP_U32(pu32Dst);
8708 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
8709 IEM_MC_ADVANCE_RIP();
8710 IEM_MC_END();
8711 break;
8712
8713 case IEMMODE_64BIT:
8714 IEM_MC_BEGIN(0, 1);
8715 IEM_MC_LOCAL(uint64_t *, pu64Dst);
8716 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8717 IEM_MC_POP_U64(pu64Dst);
8718 IEM_MC_ADVANCE_RIP();
8719 IEM_MC_END();
8720 break;
8721 }
8722
8723 return VINF_SUCCESS;
8724}
8725
8726
8727/** Opcode 0x58. */
8728FNIEMOP_DEF(iemOp_pop_eAX)
8729{
8730 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
8731 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
8732}
8733
8734
8735/** Opcode 0x59. */
8736FNIEMOP_DEF(iemOp_pop_eCX)
8737{
8738 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
8739 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
8740}
8741
8742
8743/** Opcode 0x5a. */
8744FNIEMOP_DEF(iemOp_pop_eDX)
8745{
8746 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
8747 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
8748}
8749
8750
8751/** Opcode 0x5b. */
8752FNIEMOP_DEF(iemOp_pop_eBX)
8753{
8754 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
8755 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
8756}
8757
8758
8759/** Opcode 0x5c. */
8760FNIEMOP_DEF(iemOp_pop_eSP)
8761{
8762 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
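    /* Note! Plain POP SP gets dedicated handling below since the destination is
             the very stack pointer the pop adjusts; only the REX.B variant
             (which really targets R12) can use the common helper. */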
8763 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8764 {
8765 if (pVCpu->iem.s.uRexB)
8766 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
8767 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
8768 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8769 }
8770
8771 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
8772 DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
8773 /** @todo add testcase for this instruction. */
8774 switch (pVCpu->iem.s.enmEffOpSize)
8775 {
8776 case IEMMODE_16BIT:
8777 IEM_MC_BEGIN(0, 1);
8778 IEM_MC_LOCAL(uint16_t, u16Dst);
8779 IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
8780 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
8781 IEM_MC_ADVANCE_RIP();
8782 IEM_MC_END();
8783 break;
8784
8785 case IEMMODE_32BIT:
8786 IEM_MC_BEGIN(0, 1);
8787 IEM_MC_LOCAL(uint32_t, u32Dst);
8788 IEM_MC_POP_U32(&u32Dst);
8789 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
8790 IEM_MC_ADVANCE_RIP();
8791 IEM_MC_END();
8792 break;
8793
8794 case IEMMODE_64BIT:
8795 IEM_MC_BEGIN(0, 1);
8796 IEM_MC_LOCAL(uint64_t, u64Dst);
8797 IEM_MC_POP_U64(&u64Dst);
8798 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
8799 IEM_MC_ADVANCE_RIP();
8800 IEM_MC_END();
8801 break;
8802 }
8803
8804 return VINF_SUCCESS;
8805}
8806
8807
8808/** Opcode 0x5d. */
8809FNIEMOP_DEF(iemOp_pop_eBP)
8810{
8811 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
8812 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
8813}
8814
8815
8816/** Opcode 0x5e. */
8817FNIEMOP_DEF(iemOp_pop_eSI)
8818{
8819 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
8820 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
8821}
8822
8823
8824/** Opcode 0x5f. */
8825FNIEMOP_DEF(iemOp_pop_eDI)
8826{
8827 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
8828 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
8829}
8830
8831
8832/** Opcode 0x60. */
8833FNIEMOP_DEF(iemOp_pusha)
8834{
8835 IEMOP_MNEMONIC(pusha, "pusha");
8836 IEMOP_HLP_MIN_186();
8837 IEMOP_HLP_NO_64BIT();
8838 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8839 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
8840 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
8841 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
8842}
8843
8844
8845/** Opcode 0x61. */
8846FNIEMOP_DEF(iemOp_popa)
8847{
8848 IEMOP_MNEMONIC(popa, "popa");
8849 IEMOP_HLP_MIN_186();
8850 IEMOP_HLP_NO_64BIT();
8851 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8852 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
8853 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
8854 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
8855}
8856
8857
8858/** Opcode 0x62. */
8859FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
8860// IEMOP_HLP_MIN_186();
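/* Note! In 16/32-bit mode 0x62 is BOUND Gv,Ma; with AVX-512 the same byte serves
         as the EVEX prefix, hence the combined stub name. */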
8861
8862
8863/** Opcode 0x63 - non-64-bit modes. */
8864FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
8865{
8866 IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
8867 IEMOP_HLP_MIN_286();
8868 IEMOP_HLP_NO_REAL_OR_V86_MODE();
8869 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8870
8871 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8872 {
8873 /* Register */
8874 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
8875 IEM_MC_BEGIN(3, 0);
8876 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8877 IEM_MC_ARG(uint16_t, u16Src, 1);
8878 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8879
8880 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8881 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
8882 IEM_MC_REF_EFLAGS(pEFlags);
8883 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
8884
8885 IEM_MC_ADVANCE_RIP();
8886 IEM_MC_END();
8887 }
8888 else
8889 {
8890 /* Memory */
8891 IEM_MC_BEGIN(3, 2);
8892 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8893 IEM_MC_ARG(uint16_t, u16Src, 1);
8894 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8895 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8896
8897 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8898 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
8899 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8900 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8901 IEM_MC_FETCH_EFLAGS(EFlags);
8902 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
8903
8904 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8905 IEM_MC_COMMIT_EFLAGS(EFlags);
8906 IEM_MC_ADVANCE_RIP();
8907 IEM_MC_END();
8908 }
8909 return VINF_SUCCESS;
8910
8911}
8912
8913
8914/** Opcode 0x63.
8915 * @note This is a weird one. It works like a regular move instruction if
8916 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
8917 * @todo This definitely needs a testcase to verify the odd cases. */
8918FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
8919{
 8920 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */
8921
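    /* Only reachable with REX.W set, in which case a 32-bit source is
       sign-extended into a 64-bit register, e.g. 48 63 c3 = movsxd rax,ebx. */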
8922 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
8923 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8924
8925 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8926 {
8927 /*
8928 * Register to register.
8929 */
8930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8931 IEM_MC_BEGIN(0, 1);
8932 IEM_MC_LOCAL(uint64_t, u64Value);
8933 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8934 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8935 IEM_MC_ADVANCE_RIP();
8936 IEM_MC_END();
8937 }
8938 else
8939 {
8940 /*
8941 * We're loading a register from memory.
8942 */
8943 IEM_MC_BEGIN(0, 2);
8944 IEM_MC_LOCAL(uint64_t, u64Value);
8945 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8946 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8948 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8949 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8950 IEM_MC_ADVANCE_RIP();
8951 IEM_MC_END();
8952 }
8953 return VINF_SUCCESS;
8954}
8955
8956
8957/** Opcode 0x64. */
8958FNIEMOP_DEF(iemOp_seg_FS)
8959{
8960 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
8961 IEMOP_HLP_MIN_386();
8962
8963 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
8964 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
8965
8966 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8967 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8968}
8969
8970
8971/** Opcode 0x65. */
8972FNIEMOP_DEF(iemOp_seg_GS)
8973{
8974 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
8975 IEMOP_HLP_MIN_386();
8976
8977 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
8978 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
8979
8980 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8981 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8982}
8983
8984
8985/** Opcode 0x66. */
8986FNIEMOP_DEF(iemOp_op_size)
8987{
8988 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
8989 IEMOP_HLP_MIN_386();
8990
8991 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
8992 iemRecalEffOpSize(pVCpu);
8993
8994 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8995 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8996}
8997
8998
8999/** Opcode 0x67. */
9000FNIEMOP_DEF(iemOp_addr_size)
9001{
9002 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
9003 IEMOP_HLP_MIN_386();
9004
9005 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
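    /* The prefix toggles between 16-bit and 32-bit addressing in legacy modes;
       in 64-bit mode it selects 32-bit addressing (16-bit is not encodable). */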
9006 switch (pVCpu->iem.s.enmDefAddrMode)
9007 {
9008 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
9009 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
9010 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
9011 default: AssertFailed();
9012 }
9013
9014 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9015 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9016}
9017
9018
9019/** Opcode 0x68. */
9020FNIEMOP_DEF(iemOp_push_Iz)
9021{
9022 IEMOP_MNEMONIC(push_Iz, "push Iz");
9023 IEMOP_HLP_MIN_186();
9024 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9025 switch (pVCpu->iem.s.enmEffOpSize)
9026 {
9027 case IEMMODE_16BIT:
9028 {
9029 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 9031 IEM_MC_BEGIN(0, 0);
9032 IEM_MC_PUSH_U16(u16Imm);
9033 IEM_MC_ADVANCE_RIP();
9034 IEM_MC_END();
9035 return VINF_SUCCESS;
9036 }
9037
9038 case IEMMODE_32BIT:
9039 {
9040 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 9042 IEM_MC_BEGIN(0, 0);
9043 IEM_MC_PUSH_U32(u32Imm);
9044 IEM_MC_ADVANCE_RIP();
9045 IEM_MC_END();
9046 return VINF_SUCCESS;
9047 }
9048
9049 case IEMMODE_64BIT:
9050 {
9051 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
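            /* No 64-bit immediate form exists; Iz is a 32-bit immediate that is
               sign-extended to 64 bits. */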
9052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 9053 IEM_MC_BEGIN(0, 0);
9054 IEM_MC_PUSH_U64(u64Imm);
9055 IEM_MC_ADVANCE_RIP();
9056 IEM_MC_END();
9057 return VINF_SUCCESS;
9058 }
9059
9060 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9061 }
9062}
9063
9064
9065/** Opcode 0x69. */
9066FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
9067{
9068 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
9069 IEMOP_HLP_MIN_186();
9070 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9071 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
9072
9073 switch (pVCpu->iem.s.enmEffOpSize)
9074 {
9075 case IEMMODE_16BIT:
9076 {
9077 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9078 {
9079 /* register operand */
9080 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9082
9083 IEM_MC_BEGIN(3, 1);
9084 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9085 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
9086 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9087 IEM_MC_LOCAL(uint16_t, u16Tmp);
9088
9089 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9090 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9091 IEM_MC_REF_EFLAGS(pEFlags);
9092 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9093 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9094
9095 IEM_MC_ADVANCE_RIP();
9096 IEM_MC_END();
9097 }
9098 else
9099 {
9100 /* memory operand */
9101 IEM_MC_BEGIN(3, 2);
9102 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9103 IEM_MC_ARG(uint16_t, u16Src, 1);
9104 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9105 IEM_MC_LOCAL(uint16_t, u16Tmp);
9106 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9107
9108 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
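                /* The last argument to IEM_MC_CALC_RM_EFF_ADDR is the number of
                   immediate bytes still to be fetched; it is needed to resolve
                   RIP-relative operands correctly. */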
9109 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9110 IEM_MC_ASSIGN(u16Src, u16Imm);
9111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9112 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9113 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9114 IEM_MC_REF_EFLAGS(pEFlags);
9115 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9116 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9117
9118 IEM_MC_ADVANCE_RIP();
9119 IEM_MC_END();
9120 }
9121 return VINF_SUCCESS;
9122 }
9123
9124 case IEMMODE_32BIT:
9125 {
9126 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9127 {
9128 /* register operand */
9129 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9131
9132 IEM_MC_BEGIN(3, 1);
9133 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9134 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
9135 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9136 IEM_MC_LOCAL(uint32_t, u32Tmp);
9137
9138 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9139 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9140 IEM_MC_REF_EFLAGS(pEFlags);
9141 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9142 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9143
9144 IEM_MC_ADVANCE_RIP();
9145 IEM_MC_END();
9146 }
9147 else
9148 {
9149 /* memory operand */
9150 IEM_MC_BEGIN(3, 2);
9151 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9152 IEM_MC_ARG(uint32_t, u32Src, 1);
9153 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9154 IEM_MC_LOCAL(uint32_t, u32Tmp);
9155 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9156
9157 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9158 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9159 IEM_MC_ASSIGN(u32Src, u32Imm);
9160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9161 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9162 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9163 IEM_MC_REF_EFLAGS(pEFlags);
9164 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9165 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9166
9167 IEM_MC_ADVANCE_RIP();
9168 IEM_MC_END();
9169 }
9170 return VINF_SUCCESS;
9171 }
9172
9173 case IEMMODE_64BIT:
9174 {
9175 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9176 {
9177 /* register operand */
9178 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9180
9181 IEM_MC_BEGIN(3, 1);
9182 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9183 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
9184 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9185 IEM_MC_LOCAL(uint64_t, u64Tmp);
9186
9187 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9188 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9189 IEM_MC_REF_EFLAGS(pEFlags);
9190 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9191 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9192
9193 IEM_MC_ADVANCE_RIP();
9194 IEM_MC_END();
9195 }
9196 else
9197 {
9198 /* memory operand */
9199 IEM_MC_BEGIN(3, 2);
9200 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9201 IEM_MC_ARG(uint64_t, u64Src, 1);
9202 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9203 IEM_MC_LOCAL(uint64_t, u64Tmp);
9204 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9205
9206 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9207 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9208 IEM_MC_ASSIGN(u64Src, u64Imm);
9209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9210 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9211 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9212 IEM_MC_REF_EFLAGS(pEFlags);
9213 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9214 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9215
9216 IEM_MC_ADVANCE_RIP();
9217 IEM_MC_END();
9218 }
9219 return VINF_SUCCESS;
9220 }
9221 }
9222 AssertFailedReturn(VERR_IEM_IPE_9);
9223}
9224
9225
9226/** Opcode 0x6a. */
9227FNIEMOP_DEF(iemOp_push_Ib)
9228{
9229 IEMOP_MNEMONIC(push_Ib, "push Ib");
9230 IEMOP_HLP_MIN_186();
9231 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9233 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9234
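    /* The byte immediate is sign-extended to the effective operand size before
       being pushed. */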
 9235 IEM_MC_BEGIN(0, 0);
9236 switch (pVCpu->iem.s.enmEffOpSize)
9237 {
9238 case IEMMODE_16BIT:
9239 IEM_MC_PUSH_U16(i8Imm);
9240 break;
9241 case IEMMODE_32BIT:
9242 IEM_MC_PUSH_U32(i8Imm);
9243 break;
9244 case IEMMODE_64BIT:
9245 IEM_MC_PUSH_U64(i8Imm);
9246 break;
9247 }
9248 IEM_MC_ADVANCE_RIP();
9249 IEM_MC_END();
9250 return VINF_SUCCESS;
9251}
9252
9253
9254/** Opcode 0x6b. */
9255FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
9256{
 9257 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
9258 IEMOP_HLP_MIN_186();
9259 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9260 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
9261
9262 switch (pVCpu->iem.s.enmEffOpSize)
9263 {
9264 case IEMMODE_16BIT:
9265 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9266 {
9267 /* register operand */
9268 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9270
9271 IEM_MC_BEGIN(3, 1);
9272 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9273 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
9274 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9275 IEM_MC_LOCAL(uint16_t, u16Tmp);
9276
9277 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9278 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9279 IEM_MC_REF_EFLAGS(pEFlags);
9280 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9281 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9282
9283 IEM_MC_ADVANCE_RIP();
9284 IEM_MC_END();
9285 }
9286 else
9287 {
9288 /* memory operand */
9289 IEM_MC_BEGIN(3, 2);
9290 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9291 IEM_MC_ARG(uint16_t, u16Src, 1);
9292 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9293 IEM_MC_LOCAL(uint16_t, u16Tmp);
9294 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9295
9296 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9297 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
9298 IEM_MC_ASSIGN(u16Src, u16Imm);
9299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9300 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9301 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9302 IEM_MC_REF_EFLAGS(pEFlags);
9303 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9304 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9305
9306 IEM_MC_ADVANCE_RIP();
9307 IEM_MC_END();
9308 }
9309 return VINF_SUCCESS;
9310
9311 case IEMMODE_32BIT:
9312 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9313 {
9314 /* register operand */
9315 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9317
9318 IEM_MC_BEGIN(3, 1);
9319 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9320 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
9321 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9322 IEM_MC_LOCAL(uint32_t, u32Tmp);
9323
9324 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9325 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9326 IEM_MC_REF_EFLAGS(pEFlags);
9327 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9328 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9329
9330 IEM_MC_ADVANCE_RIP();
9331 IEM_MC_END();
9332 }
9333 else
9334 {
9335 /* memory operand */
9336 IEM_MC_BEGIN(3, 2);
9337 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9338 IEM_MC_ARG(uint32_t, u32Src, 1);
9339 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9340 IEM_MC_LOCAL(uint32_t, u32Tmp);
9341 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9342
9343 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9344 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
9345 IEM_MC_ASSIGN(u32Src, u32Imm);
9346 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9347 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9348 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9349 IEM_MC_REF_EFLAGS(pEFlags);
9350 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9351 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9352
9353 IEM_MC_ADVANCE_RIP();
9354 IEM_MC_END();
9355 }
9356 return VINF_SUCCESS;
9357
9358 case IEMMODE_64BIT:
9359 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9360 {
9361 /* register operand */
9362 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9364
9365 IEM_MC_BEGIN(3, 1);
9366 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9367 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
9368 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9369 IEM_MC_LOCAL(uint64_t, u64Tmp);
9370
9371 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9372 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9373 IEM_MC_REF_EFLAGS(pEFlags);
9374 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9375 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9376
9377 IEM_MC_ADVANCE_RIP();
9378 IEM_MC_END();
9379 }
9380 else
9381 {
9382 /* memory operand */
9383 IEM_MC_BEGIN(3, 2);
9384 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9385 IEM_MC_ARG(uint64_t, u64Src, 1);
9386 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9387 IEM_MC_LOCAL(uint64_t, u64Tmp);
9388 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9389
9390 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9391 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
9392 IEM_MC_ASSIGN(u64Src, u64Imm);
9393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9394 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9395 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9396 IEM_MC_REF_EFLAGS(pEFlags);
9397 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9398 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9399
9400 IEM_MC_ADVANCE_RIP();
9401 IEM_MC_END();
9402 }
9403 return VINF_SUCCESS;
9404 }
9405 AssertFailedReturn(VERR_IEM_IPE_8);
9406}
9407
9408
9409/** Opcode 0x6c. */
9410FNIEMOP_DEF(iemOp_insb_Yb_DX)
9411{
9412 IEMOP_HLP_MIN_186();
9413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
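    /* String I/O accesses ports and may iterate under REP, so all variants are
       deferred to C implementations. */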
9414 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
9415 {
9416 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
9417 switch (pVCpu->iem.s.enmEffAddrMode)
9418 {
9419 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
9420 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
9421 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
9422 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9423 }
9424 }
9425 else
9426 {
9427 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
9428 switch (pVCpu->iem.s.enmEffAddrMode)
9429 {
9430 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
9431 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
9432 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
9433 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9434 }
9435 }
9436}
9437
9438
9439/** Opcode 0x6d. */
9440FNIEMOP_DEF(iemOp_inswd_Yv_DX)
9441{
9442 IEMOP_HLP_MIN_186();
9443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9444 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
9445 {
9446 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
9447 switch (pVCpu->iem.s.enmEffOpSize)
9448 {
9449 case IEMMODE_16BIT:
9450 switch (pVCpu->iem.s.enmEffAddrMode)
9451 {
9452 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
9453 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
9454 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
9455 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9456 }
9457 break;
9458 case IEMMODE_64BIT:
9459 case IEMMODE_32BIT:
9460 switch (pVCpu->iem.s.enmEffAddrMode)
9461 {
9462 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
9463 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
9464 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
9465 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9466 }
9467 break;
9468 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9469 }
9470 }
9471 else
9472 {
9473 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
9474 switch (pVCpu->iem.s.enmEffOpSize)
9475 {
9476 case IEMMODE_16BIT:
9477 switch (pVCpu->iem.s.enmEffAddrMode)
9478 {
9479 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
9480 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
9481 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
9482 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9483 }
9484 break;
9485 case IEMMODE_64BIT:
9486 case IEMMODE_32BIT:
9487 switch (pVCpu->iem.s.enmEffAddrMode)
9488 {
9489 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
9490 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
9491 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
9492 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9493 }
9494 break;
9495 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9496 }
9497 }
9498}
9499
9500
9501/** Opcode 0x6e. */
9502FNIEMOP_DEF(iemOp_outsb_Yb_DX)
9503{
9504 IEMOP_HLP_MIN_186();
9505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9506 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
9507 {
9508 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
9509 switch (pVCpu->iem.s.enmEffAddrMode)
9510 {
9511 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
9512 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
9513 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
9514 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9515 }
9516 }
9517 else
9518 {
9519 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
9520 switch (pVCpu->iem.s.enmEffAddrMode)
9521 {
9522 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
9523 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
9524 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
9525 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9526 }
9527 }
9528}
9529
9530
9531/** Opcode 0x6f. */
9532FNIEMOP_DEF(iemOp_outswd_Yv_DX)
9533{
9534 IEMOP_HLP_MIN_186();
9535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9536 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
9537 {
9538 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
9539 switch (pVCpu->iem.s.enmEffOpSize)
9540 {
9541 case IEMMODE_16BIT:
9542 switch (pVCpu->iem.s.enmEffAddrMode)
9543 {
9544 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
9545 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
9546 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
9547 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9548 }
9549 break;
9550 case IEMMODE_64BIT:
9551 case IEMMODE_32BIT:
9552 switch (pVCpu->iem.s.enmEffAddrMode)
9553 {
9554 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
9555 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
9556 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
9557 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9558 }
9559 break;
9560 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9561 }
9562 }
9563 else
9564 {
9565 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
9566 switch (pVCpu->iem.s.enmEffOpSize)
9567 {
9568 case IEMMODE_16BIT:
9569 switch (pVCpu->iem.s.enmEffAddrMode)
9570 {
9571 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
9572 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
9573 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
9574 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9575 }
9576 break;
9577 case IEMMODE_64BIT:
9578 case IEMMODE_32BIT:
9579 switch (pVCpu->iem.s.enmEffAddrMode)
9580 {
9581 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
9582 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
9583 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
9584 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9585 }
9586 break;
9587 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9588 }
9589 }
9590}
9591
9592
9593/** Opcode 0x70. */
9594FNIEMOP_DEF(iemOp_jo_Jb)
9595{
9596 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
9597 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9599 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9600
9601 IEM_MC_BEGIN(0, 0);
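    /* Condition met: take the relative jump, otherwise step past the instruction.
       The inverted forms below swap the two branches of the same flag test. */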
9602 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
9603 IEM_MC_REL_JMP_S8(i8Imm);
9604 } IEM_MC_ELSE() {
9605 IEM_MC_ADVANCE_RIP();
9606 } IEM_MC_ENDIF();
9607 IEM_MC_END();
9608 return VINF_SUCCESS;
9609}
9610
9611
9612/** Opcode 0x71. */
9613FNIEMOP_DEF(iemOp_jno_Jb)
9614{
9615 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
9616 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9618 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9619
9620 IEM_MC_BEGIN(0, 0);
9621 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
9622 IEM_MC_ADVANCE_RIP();
9623 } IEM_MC_ELSE() {
9624 IEM_MC_REL_JMP_S8(i8Imm);
9625 } IEM_MC_ENDIF();
9626 IEM_MC_END();
9627 return VINF_SUCCESS;
9628}
9629
9630/** Opcode 0x72. */
9631FNIEMOP_DEF(iemOp_jc_Jb)
9632{
9633 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
9634 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9636 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9637
9638 IEM_MC_BEGIN(0, 0);
9639 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
9640 IEM_MC_REL_JMP_S8(i8Imm);
9641 } IEM_MC_ELSE() {
9642 IEM_MC_ADVANCE_RIP();
9643 } IEM_MC_ENDIF();
9644 IEM_MC_END();
9645 return VINF_SUCCESS;
9646}
9647
9648
9649/** Opcode 0x73. */
9650FNIEMOP_DEF(iemOp_jnc_Jb)
9651{
9652 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
9653 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9655 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9656
9657 IEM_MC_BEGIN(0, 0);
9658 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
9659 IEM_MC_ADVANCE_RIP();
9660 } IEM_MC_ELSE() {
9661 IEM_MC_REL_JMP_S8(i8Imm);
9662 } IEM_MC_ENDIF();
9663 IEM_MC_END();
9664 return VINF_SUCCESS;
9665}
9666
9667
9668/** Opcode 0x74. */
9669FNIEMOP_DEF(iemOp_je_Jb)
9670{
9671 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
9672 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9674 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9675
9676 IEM_MC_BEGIN(0, 0);
9677 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
9678 IEM_MC_REL_JMP_S8(i8Imm);
9679 } IEM_MC_ELSE() {
9680 IEM_MC_ADVANCE_RIP();
9681 } IEM_MC_ENDIF();
9682 IEM_MC_END();
9683 return VINF_SUCCESS;
9684}
9685
9686
9687/** Opcode 0x75. */
9688FNIEMOP_DEF(iemOp_jne_Jb)
9689{
9690 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
9691 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9692 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9693 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9694
9695 IEM_MC_BEGIN(0, 0);
9696 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
9697 IEM_MC_ADVANCE_RIP();
9698 } IEM_MC_ELSE() {
9699 IEM_MC_REL_JMP_S8(i8Imm);
9700 } IEM_MC_ENDIF();
9701 IEM_MC_END();
9702 return VINF_SUCCESS;
9703}
9704
9705
9706/** Opcode 0x76. */
9707FNIEMOP_DEF(iemOp_jbe_Jb)
9708{
9709 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
9710 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9712 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9713
9714 IEM_MC_BEGIN(0, 0);
9715 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
9716 IEM_MC_REL_JMP_S8(i8Imm);
9717 } IEM_MC_ELSE() {
9718 IEM_MC_ADVANCE_RIP();
9719 } IEM_MC_ENDIF();
9720 IEM_MC_END();
9721 return VINF_SUCCESS;
9722}
9723
9724
9725/** Opcode 0x77. */
9726FNIEMOP_DEF(iemOp_jnbe_Jb)
9727{
9728 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
9729 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9730 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9731 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9732
9733 IEM_MC_BEGIN(0, 0);
9734 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
9735 IEM_MC_ADVANCE_RIP();
9736 } IEM_MC_ELSE() {
9737 IEM_MC_REL_JMP_S8(i8Imm);
9738 } IEM_MC_ENDIF();
9739 IEM_MC_END();
9740 return VINF_SUCCESS;
9741}
9742
9743
9744/** Opcode 0x78. */
9745FNIEMOP_DEF(iemOp_js_Jb)
9746{
9747 IEMOP_MNEMONIC(js_Jb, "js Jb");
9748 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9750 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9751
9752 IEM_MC_BEGIN(0, 0);
9753 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
9754 IEM_MC_REL_JMP_S8(i8Imm);
9755 } IEM_MC_ELSE() {
9756 IEM_MC_ADVANCE_RIP();
9757 } IEM_MC_ENDIF();
9758 IEM_MC_END();
9759 return VINF_SUCCESS;
9760}
9761
9762
9763/** Opcode 0x79. */
9764FNIEMOP_DEF(iemOp_jns_Jb)
9765{
9766 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
9767 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9769 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9770
9771 IEM_MC_BEGIN(0, 0);
9772 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
9773 IEM_MC_ADVANCE_RIP();
9774 } IEM_MC_ELSE() {
9775 IEM_MC_REL_JMP_S8(i8Imm);
9776 } IEM_MC_ENDIF();
9777 IEM_MC_END();
9778 return VINF_SUCCESS;
9779}
9780
9781
9782/** Opcode 0x7a. */
9783FNIEMOP_DEF(iemOp_jp_Jb)
9784{
9785 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
9786 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9788 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9789
9790 IEM_MC_BEGIN(0, 0);
9791 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
9792 IEM_MC_REL_JMP_S8(i8Imm);
9793 } IEM_MC_ELSE() {
9794 IEM_MC_ADVANCE_RIP();
9795 } IEM_MC_ENDIF();
9796 IEM_MC_END();
9797 return VINF_SUCCESS;
9798}
9799
9800
9801/** Opcode 0x7b. */
9802FNIEMOP_DEF(iemOp_jnp_Jb)
9803{
9804 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
9805 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9807 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9808
9809 IEM_MC_BEGIN(0, 0);
9810 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
9811 IEM_MC_ADVANCE_RIP();
9812 } IEM_MC_ELSE() {
9813 IEM_MC_REL_JMP_S8(i8Imm);
9814 } IEM_MC_ENDIF();
9815 IEM_MC_END();
9816 return VINF_SUCCESS;
9817}
9818
9819
9820/** Opcode 0x7c. */
9821FNIEMOP_DEF(iemOp_jl_Jb)
9822{
9823 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
9824 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9826 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9827
9828 IEM_MC_BEGIN(0, 0);
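    /* Signed less-than: SF != OF. */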
9829 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
9830 IEM_MC_REL_JMP_S8(i8Imm);
9831 } IEM_MC_ELSE() {
9832 IEM_MC_ADVANCE_RIP();
9833 } IEM_MC_ENDIF();
9834 IEM_MC_END();
9835 return VINF_SUCCESS;
9836}
9837
9838
9839/** Opcode 0x7d. */
9840FNIEMOP_DEF(iemOp_jnl_Jb)
9841{
9842 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
9843 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9845 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9846
9847 IEM_MC_BEGIN(0, 0);
9848 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
9849 IEM_MC_ADVANCE_RIP();
9850 } IEM_MC_ELSE() {
9851 IEM_MC_REL_JMP_S8(i8Imm);
9852 } IEM_MC_ENDIF();
9853 IEM_MC_END();
9854 return VINF_SUCCESS;
9855}
9856
9857
9858/** Opcode 0x7e. */
9859FNIEMOP_DEF(iemOp_jle_Jb)
9860{
9861 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
9862 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9864 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9865
9866 IEM_MC_BEGIN(0, 0);
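    /* Signed less-or-equal: ZF set, or SF != OF. */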
9867 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
9868 IEM_MC_REL_JMP_S8(i8Imm);
9869 } IEM_MC_ELSE() {
9870 IEM_MC_ADVANCE_RIP();
9871 } IEM_MC_ENDIF();
9872 IEM_MC_END();
9873 return VINF_SUCCESS;
9874}
9875
9876
9877/** Opcode 0x7f. */
9878FNIEMOP_DEF(iemOp_jnle_Jb)
9879{
9880 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
9881 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9882 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9883 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9884
9885 IEM_MC_BEGIN(0, 0);
9886 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
9887 IEM_MC_ADVANCE_RIP();
9888 } IEM_MC_ELSE() {
9889 IEM_MC_REL_JMP_S8(i8Imm);
9890 } IEM_MC_ENDIF();
9891 IEM_MC_END();
9892 return VINF_SUCCESS;
9893}
9894
9895
9896/** Opcode 0x80. */
9897FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
9898{
9899 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9900 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9901 {
9902 case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
9903 case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
9904 case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
9905 case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
9906 case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
9907 case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
9908 case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
9909 case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
9910 }
9911 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
9912
9913 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9914 {
9915 /* register target */
9916 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9917 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9918 IEM_MC_BEGIN(3, 0);
9919 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9920 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
9921 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9922
9923 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9924 IEM_MC_REF_EFLAGS(pEFlags);
9925 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
9926
9927 IEM_MC_ADVANCE_RIP();
9928 IEM_MC_END();
9929 }
9930 else
9931 {
9932 /* memory target */
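        /* CMP only reads its destination, so it is mapped read-only; the other
           group members are read-modify-write and may carry a LOCK prefix. */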
9933 uint32_t fAccess;
9934 if (pImpl->pfnLockedU8)
9935 fAccess = IEM_ACCESS_DATA_RW;
9936 else /* CMP */
9937 fAccess = IEM_ACCESS_DATA_R;
9938 IEM_MC_BEGIN(3, 2);
9939 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9940 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9941 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9942
9943 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9944 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9945 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
9946 if (pImpl->pfnLockedU8)
9947 IEMOP_HLP_DONE_DECODING();
9948 else
9949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9950
9951 IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9952 IEM_MC_FETCH_EFLAGS(EFlags);
9953 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9954 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
9955 else
9956 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
9957
9958 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
9959 IEM_MC_COMMIT_EFLAGS(EFlags);
9960 IEM_MC_ADVANCE_RIP();
9961 IEM_MC_END();
9962 }
9963 return VINF_SUCCESS;
9964}
9965
9966
9967/** Opcode 0x81. */
9968FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
9969{
9970 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9971 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9972 {
9973 case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
9974 case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
9975 case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
9976 case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
9977 case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
9978 case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
9979 case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
9980 case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
9981 }
9982 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
9983
9984 switch (pVCpu->iem.s.enmEffOpSize)
9985 {
9986 case IEMMODE_16BIT:
9987 {
9988 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9989 {
9990 /* register target */
9991 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9993 IEM_MC_BEGIN(3, 0);
9994 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9995 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
9996 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9997
9998 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9999 IEM_MC_REF_EFLAGS(pEFlags);
10000 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10001
10002 IEM_MC_ADVANCE_RIP();
10003 IEM_MC_END();
10004 }
10005 else
10006 {
10007 /* memory target */
10008 uint32_t fAccess;
10009 if (pImpl->pfnLockedU16)
10010 fAccess = IEM_ACCESS_DATA_RW;
10011 else /* CMP */
10012 fAccess = IEM_ACCESS_DATA_R;
10013 IEM_MC_BEGIN(3, 2);
10014 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10015 IEM_MC_ARG(uint16_t, u16Src, 1);
10016 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10017 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10018
10019 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
10020 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10021 IEM_MC_ASSIGN(u16Src, u16Imm);
10022 if (pImpl->pfnLockedU16)
10023 IEMOP_HLP_DONE_DECODING();
10024 else
10025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10026 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10027 IEM_MC_FETCH_EFLAGS(EFlags);
10028 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10029 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10030 else
10031 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
10032
10033 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
10034 IEM_MC_COMMIT_EFLAGS(EFlags);
10035 IEM_MC_ADVANCE_RIP();
10036 IEM_MC_END();
10037 }
10038 break;
10039 }
10040
10041 case IEMMODE_32BIT:
10042 {
10043 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10044 {
10045 /* register target */
10046 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10048 IEM_MC_BEGIN(3, 0);
10049 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10050 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
10051 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10052
10053 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10054 IEM_MC_REF_EFLAGS(pEFlags);
10055 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10056 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10057
10058 IEM_MC_ADVANCE_RIP();
10059 IEM_MC_END();
10060 }
10061 else
10062 {
10063 /* memory target */
10064 uint32_t fAccess;
10065 if (pImpl->pfnLockedU32)
10066 fAccess = IEM_ACCESS_DATA_RW;
10067 else /* CMP */
10068 fAccess = IEM_ACCESS_DATA_R;
10069 IEM_MC_BEGIN(3, 2);
10070 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10071 IEM_MC_ARG(uint32_t, u32Src, 1);
10072 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10073 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10074
10075 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
10076 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10077 IEM_MC_ASSIGN(u32Src, u32Imm);
10078 if (pImpl->pfnLockedU32)
10079 IEMOP_HLP_DONE_DECODING();
10080 else
10081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10082 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10083 IEM_MC_FETCH_EFLAGS(EFlags);
10084 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10085 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10086 else
10087 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
10088
10089 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
10090 IEM_MC_COMMIT_EFLAGS(EFlags);
10091 IEM_MC_ADVANCE_RIP();
10092 IEM_MC_END();
10093 }
10094 break;
10095 }
10096
10097 case IEMMODE_64BIT:
10098 {
10099 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10100 {
10101 /* register target */
10102 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10104 IEM_MC_BEGIN(3, 0);
10105 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10106 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
10107 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10108
10109 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10110 IEM_MC_REF_EFLAGS(pEFlags);
10111 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10112
10113 IEM_MC_ADVANCE_RIP();
10114 IEM_MC_END();
10115 }
10116 else
10117 {
10118 /* memory target */
10119 uint32_t fAccess;
10120 if (pImpl->pfnLockedU64)
10121 fAccess = IEM_ACCESS_DATA_RW;
10122 else /* CMP */
10123 fAccess = IEM_ACCESS_DATA_R;
10124 IEM_MC_BEGIN(3, 2);
10125 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10126 IEM_MC_ARG(uint64_t, u64Src, 1);
10127 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10128 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10129
10130 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
10131 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10132 if (pImpl->pfnLockedU64)
10133 IEMOP_HLP_DONE_DECODING();
10134 else
10135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10136 IEM_MC_ASSIGN(u64Src, u64Imm);
10137 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10138 IEM_MC_FETCH_EFLAGS(EFlags);
10139 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10140 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10141 else
10142 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
10143
10144 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
10145 IEM_MC_COMMIT_EFLAGS(EFlags);
10146 IEM_MC_ADVANCE_RIP();
10147 IEM_MC_END();
10148 }
10149 break;
10150 }
10151 }
10152 return VINF_SUCCESS;
10153}
10154
10155
10156/** Opcode 0x82. */
10157FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
10158{
10159 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
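    /* 0x82 is a legacy alias of 0x80 (Grp1 Eb,Ib) and is invalid in 64-bit mode. */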
10160 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
10161}
10162
10163
10164/** Opcode 0x83. */
10165FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
10166{
10167 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10168 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10169 {
10170 case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
10171 case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
10172 case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
10173 case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
10174 case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
10175 case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
10176 case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
10177 case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
10178 }
10179 /* Note! The OR, AND and XOR forms appear to be present on CPUs prior
10180 to the 386, even though they are absent from the Intel reference
10181 manuals and some third-party opcode listings. */
10182 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
10183
10184 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10185 {
10186 /*
10187 * Register target
10188 */
10189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10190 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10191 switch (pVCpu->iem.s.enmEffOpSize)
10192 {
10193 case IEMMODE_16BIT:
10194 {
10195 IEM_MC_BEGIN(3, 0);
10196 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10197 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
10198 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10199
10200 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10201 IEM_MC_REF_EFLAGS(pEFlags);
10202 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10203
10204 IEM_MC_ADVANCE_RIP();
10205 IEM_MC_END();
10206 break;
10207 }
10208
10209 case IEMMODE_32BIT:
10210 {
10211 IEM_MC_BEGIN(3, 0);
10212 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10213 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
10214 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10215
10216 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10217 IEM_MC_REF_EFLAGS(pEFlags);
10218 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10219 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10220
10221 IEM_MC_ADVANCE_RIP();
10222 IEM_MC_END();
10223 break;
10224 }
10225
10226 case IEMMODE_64BIT:
10227 {
10228 IEM_MC_BEGIN(3, 0);
10229 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10230 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
10231 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10232
10233 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10234 IEM_MC_REF_EFLAGS(pEFlags);
10235 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10236
10237 IEM_MC_ADVANCE_RIP();
10238 IEM_MC_END();
10239 break;
10240 }
10241 }
10242 }
10243 else
10244 {
10245 /*
10246 * Memory target.
10247 */
10248 uint32_t fAccess;
10249 if (pImpl->pfnLockedU16)
10250 fAccess = IEM_ACCESS_DATA_RW;
10251 else /* CMP */
10252 fAccess = IEM_ACCESS_DATA_R;
10253
10254 switch (pVCpu->iem.s.enmEffOpSize)
10255 {
10256 case IEMMODE_16BIT:
10257 {
10258 IEM_MC_BEGIN(3, 2);
10259 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10260 IEM_MC_ARG(uint16_t, u16Src, 1);
10261 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10262 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10263
10264 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10265 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10266 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
10267 if (pImpl->pfnLockedU16)
10268 IEMOP_HLP_DONE_DECODING();
10269 else
10270 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10271 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10272 IEM_MC_FETCH_EFLAGS(EFlags);
10273 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10274 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10275 else
10276 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
10277
10278 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
10279 IEM_MC_COMMIT_EFLAGS(EFlags);
10280 IEM_MC_ADVANCE_RIP();
10281 IEM_MC_END();
10282 break;
10283 }
10284
10285 case IEMMODE_32BIT:
10286 {
10287 IEM_MC_BEGIN(3, 2);
10288 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10289 IEM_MC_ARG(uint32_t, u32Src, 1);
10290 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10291 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10292
10293 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10294 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10295 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
10296 if (pImpl->pfnLockedU32)
10297 IEMOP_HLP_DONE_DECODING();
10298 else
10299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10300 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10301 IEM_MC_FETCH_EFLAGS(EFlags);
10302 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10303 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10304 else
10305 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
10306
10307 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
10308 IEM_MC_COMMIT_EFLAGS(EFlags);
10309 IEM_MC_ADVANCE_RIP();
10310 IEM_MC_END();
10311 break;
10312 }
10313
10314 case IEMMODE_64BIT:
10315 {
10316 IEM_MC_BEGIN(3, 2);
10317 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10318 IEM_MC_ARG(uint64_t, u64Src, 1);
10319 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10320 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10321
10322 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10323 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10324 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
10325 if (pImpl->pfnLockedU64)
10326 IEMOP_HLP_DONE_DECODING();
10327 else
10328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10329 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10330 IEM_MC_FETCH_EFLAGS(EFlags);
10331 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10332 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10333 else
10334 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
10335
10336 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
10337 IEM_MC_COMMIT_EFLAGS(EFlags);
10338 IEM_MC_ADVANCE_RIP();
10339 IEM_MC_END();
10340 break;
10341 }
10342 }
10343 }
10344 return VINF_SUCCESS;
10345}
10346
10347
10348/** Opcode 0x84. */
10349FNIEMOP_DEF(iemOp_test_Eb_Gb)
10350{
10351 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
10352 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
10353 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
10354}
10355
10356
10357/** Opcode 0x85. */
10358FNIEMOP_DEF(iemOp_test_Ev_Gv)
10359{
10360 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
10361 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
10362 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
10363}
10364
10365
10366/** Opcode 0x86. */
10367FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
10368{
10369 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10370 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
10371
10372 /*
10373 * If rm is denoting a register, no more instruction bytes.
10374 */
10375 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10376 {
10377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10378
10379 IEM_MC_BEGIN(0, 2);
10380 IEM_MC_LOCAL(uint8_t, uTmp1);
10381 IEM_MC_LOCAL(uint8_t, uTmp2);
10382
10383 IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10384 IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10385 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
10386 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
10387
10388 IEM_MC_ADVANCE_RIP();
10389 IEM_MC_END();
10390 }
10391 else
10392 {
10393 /*
10394 * We're accessing memory.
10395 */
10396/** @todo the register must be committed separately! */
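        /* Note! XCHG with a memory operand is implicitly locked on real hardware,
                 no LOCK prefix required. */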
10397 IEM_MC_BEGIN(2, 2);
10398 IEM_MC_ARG(uint8_t *, pu8Mem, 0);
10399 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
10400 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10401
10402 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10403 IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10404 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10405 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
10406 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);
10407
10408 IEM_MC_ADVANCE_RIP();
10409 IEM_MC_END();
10410 }
10411 return VINF_SUCCESS;
10412}
10413
10414
10415/** Opcode 0x87. */
10416FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
10417{
10418 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
10419 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10420
10421 /*
10422 * If rm is denoting a register, no more instruction bytes.
10423 */
10424 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10425 {
10426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10427
10428 switch (pVCpu->iem.s.enmEffOpSize)
10429 {
10430 case IEMMODE_16BIT:
10431 IEM_MC_BEGIN(0, 2);
10432 IEM_MC_LOCAL(uint16_t, uTmp1);
10433 IEM_MC_LOCAL(uint16_t, uTmp2);
10434
10435 IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10436 IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10437 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
10438 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
10439
10440 IEM_MC_ADVANCE_RIP();
10441 IEM_MC_END();
10442 return VINF_SUCCESS;
10443
10444 case IEMMODE_32BIT:
10445 IEM_MC_BEGIN(0, 2);
10446 IEM_MC_LOCAL(uint32_t, uTmp1);
10447 IEM_MC_LOCAL(uint32_t, uTmp2);
10448
10449 IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10450 IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10451 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
10452 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
10453
10454 IEM_MC_ADVANCE_RIP();
10455 IEM_MC_END();
10456 return VINF_SUCCESS;
10457
10458 case IEMMODE_64BIT:
10459 IEM_MC_BEGIN(0, 2);
10460 IEM_MC_LOCAL(uint64_t, uTmp1);
10461 IEM_MC_LOCAL(uint64_t, uTmp2);
10462
10463 IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10464 IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10465 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
10466 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
10467
10468 IEM_MC_ADVANCE_RIP();
10469 IEM_MC_END();
10470 return VINF_SUCCESS;
10471
10472 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10473 }
10474 }
10475 else
10476 {
10477 /*
10478 * We're accessing memory.
10479 */
10480 switch (pVCpu->iem.s.enmEffOpSize)
10481 {
10482/** @todo the register must be committed separately! */
10483 case IEMMODE_16BIT:
10484 IEM_MC_BEGIN(2, 2);
10485 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
10486 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
10487 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10488
10489 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10490 IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10491 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10492 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
10493 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
10494
10495 IEM_MC_ADVANCE_RIP();
10496 IEM_MC_END();
10497 return VINF_SUCCESS;
10498
10499 case IEMMODE_32BIT:
10500 IEM_MC_BEGIN(2, 2);
10501 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
10502 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
10503 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10504
10505 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10506 IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10507 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10508 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
10509 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
10510
10511 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
10512 IEM_MC_ADVANCE_RIP();
10513 IEM_MC_END();
10514 return VINF_SUCCESS;
10515
10516 case IEMMODE_64BIT:
10517 IEM_MC_BEGIN(2, 2);
10518 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
10519 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
10520 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10521
10522 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10523 IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10524 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10525 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
10526 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
10527
10528 IEM_MC_ADVANCE_RIP();
10529 IEM_MC_END();
10530 return VINF_SUCCESS;
10531
10532 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10533 }
10534 }
10535}
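
/* Note: XCHG with a memory operand is implicitly locked on real hardware even
 * without an F0 prefix, which is presumably why the memory paths above map the
 * operand for read/write and leave the actual swap to the iemAImpl_xchg_uNN
 * assembly helpers instead of doing separate load and store steps. */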
10536
10537
10538/** Opcode 0x88. */
10539FNIEMOP_DEF(iemOp_mov_Eb_Gb)
10540{
10541 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
10542
10543    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10545
10546 /*
10547 * If rm is denoting a register, no more instruction bytes.
10548 */
10549 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10550 {
10551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10552 IEM_MC_BEGIN(0, 1);
10553 IEM_MC_LOCAL(uint8_t, u8Value);
10554 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10555 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
10556 IEM_MC_ADVANCE_RIP();
10557 IEM_MC_END();
10558 }
10559 else
10560 {
10561 /*
10562 * We're writing a register to memory.
10563 */
10564 IEM_MC_BEGIN(0, 2);
10565 IEM_MC_LOCAL(uint8_t, u8Value);
10566 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10567 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10569 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10570 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
10571 IEM_MC_ADVANCE_RIP();
10572 IEM_MC_END();
10573 }
10574 return VINF_SUCCESS;
10576}
10577
10578
10579/** Opcode 0x89. */
10580FNIEMOP_DEF(iemOp_mov_Ev_Gv)
10581{
10582 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
10583
10584 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10585
10586 /*
10587 * If rm is denoting a register, no more instruction bytes.
10588 */
10589 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10590 {
10591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10592 switch (pVCpu->iem.s.enmEffOpSize)
10593 {
10594 case IEMMODE_16BIT:
10595 IEM_MC_BEGIN(0, 1);
10596 IEM_MC_LOCAL(uint16_t, u16Value);
10597 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10598 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
10599 IEM_MC_ADVANCE_RIP();
10600 IEM_MC_END();
10601 break;
10602
10603 case IEMMODE_32BIT:
10604 IEM_MC_BEGIN(0, 1);
10605 IEM_MC_LOCAL(uint32_t, u32Value);
10606 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10607 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
10608 IEM_MC_ADVANCE_RIP();
10609 IEM_MC_END();
10610 break;
10611
10612 case IEMMODE_64BIT:
10613 IEM_MC_BEGIN(0, 1);
10614 IEM_MC_LOCAL(uint64_t, u64Value);
10615 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10616 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
10617 IEM_MC_ADVANCE_RIP();
10618 IEM_MC_END();
10619 break;
10620 }
10621 }
10622 else
10623 {
10624 /*
10625 * We're writing a register to memory.
10626 */
10627 switch (pVCpu->iem.s.enmEffOpSize)
10628 {
10629 case IEMMODE_16BIT:
10630 IEM_MC_BEGIN(0, 2);
10631 IEM_MC_LOCAL(uint16_t, u16Value);
10632 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10633 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10635 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10636 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
10637 IEM_MC_ADVANCE_RIP();
10638 IEM_MC_END();
10639 break;
10640
10641 case IEMMODE_32BIT:
10642 IEM_MC_BEGIN(0, 2);
10643 IEM_MC_LOCAL(uint32_t, u32Value);
10644 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10647 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10648 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
10649 IEM_MC_ADVANCE_RIP();
10650 IEM_MC_END();
10651 break;
10652
10653 case IEMMODE_64BIT:
10654 IEM_MC_BEGIN(0, 2);
10655 IEM_MC_LOCAL(uint64_t, u64Value);
10656 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10657 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10658 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10659 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10660 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
10661 IEM_MC_ADVANCE_RIP();
10662 IEM_MC_END();
10663 break;
10664 }
10665 }
10666 return VINF_SUCCESS;
10667}
10668
10669
10670/** Opcode 0x8a. */
10671FNIEMOP_DEF(iemOp_mov_Gb_Eb)
10672{
10673 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
10674
10675 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10676
10677 /*
10678 * If rm is denoting a register, no more instruction bytes.
10679 */
10680 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10681 {
10682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10683 IEM_MC_BEGIN(0, 1);
10684 IEM_MC_LOCAL(uint8_t, u8Value);
10685 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10686 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
10687 IEM_MC_ADVANCE_RIP();
10688 IEM_MC_END();
10689 }
10690 else
10691 {
10692 /*
10693 * We're loading a register from memory.
10694 */
10695 IEM_MC_BEGIN(0, 2);
10696 IEM_MC_LOCAL(uint8_t, u8Value);
10697 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10698 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10700 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10701 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
10702 IEM_MC_ADVANCE_RIP();
10703 IEM_MC_END();
10704 }
10705 return VINF_SUCCESS;
10706}
10707
10708
10709/** Opcode 0x8b. */
10710FNIEMOP_DEF(iemOp_mov_Gv_Ev)
10711{
10712 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
10713
10714 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10715
10716 /*
10717 * If rm is denoting a register, no more instruction bytes.
10718 */
10719 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10720 {
10721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10722 switch (pVCpu->iem.s.enmEffOpSize)
10723 {
10724 case IEMMODE_16BIT:
10725 IEM_MC_BEGIN(0, 1);
10726 IEM_MC_LOCAL(uint16_t, u16Value);
10727 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10728 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
10729 IEM_MC_ADVANCE_RIP();
10730 IEM_MC_END();
10731 break;
10732
10733 case IEMMODE_32BIT:
10734 IEM_MC_BEGIN(0, 1);
10735 IEM_MC_LOCAL(uint32_t, u32Value);
10736 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10737 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
10738 IEM_MC_ADVANCE_RIP();
10739 IEM_MC_END();
10740 break;
10741
10742 case IEMMODE_64BIT:
10743 IEM_MC_BEGIN(0, 1);
10744 IEM_MC_LOCAL(uint64_t, u64Value);
10745 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10746 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
10747 IEM_MC_ADVANCE_RIP();
10748 IEM_MC_END();
10749 break;
10750 }
10751 }
10752 else
10753 {
10754 /*
10755 * We're loading a register from memory.
10756 */
10757 switch (pVCpu->iem.s.enmEffOpSize)
10758 {
10759 case IEMMODE_16BIT:
10760 IEM_MC_BEGIN(0, 2);
10761 IEM_MC_LOCAL(uint16_t, u16Value);
10762 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10763 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10765 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10766 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
10767 IEM_MC_ADVANCE_RIP();
10768 IEM_MC_END();
10769 break;
10770
10771 case IEMMODE_32BIT:
10772 IEM_MC_BEGIN(0, 2);
10773 IEM_MC_LOCAL(uint32_t, u32Value);
10774 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10775 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10777 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10778 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
10779 IEM_MC_ADVANCE_RIP();
10780 IEM_MC_END();
10781 break;
10782
10783 case IEMMODE_64BIT:
10784 IEM_MC_BEGIN(0, 2);
10785 IEM_MC_LOCAL(uint64_t, u64Value);
10786 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10789 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10790 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
10791 IEM_MC_ADVANCE_RIP();
10792 IEM_MC_END();
10793 break;
10794 }
10795 }
10796 return VINF_SUCCESS;
10797}
10798
10799
10800/** Opcode 0x63. */
10801FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
10802{
10803 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
10804 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
10805 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10806 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
10807 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
10808}
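
/* Example: in 64-bit mode the bytes 48 63 c3 decode as 'movsxd rax,ebx' (a
 * sign-extending move); without REX.W the effective operand size is not
 * 64-bit and the decode above falls back to a plain 'mov Gv,Ev'. Outside of
 * long mode the same 63h opcode is the legacy ARPL instruction. */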
10809
10810
10811/** Opcode 0x8c. */
10812FNIEMOP_DEF(iemOp_mov_Ev_Sw)
10813{
10814 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
10815
10816 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10817
10818 /*
10819 * Check that the specified segment register exists.  The REX.R prefix is ignored.
10820 */
10821 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
10822    if (iSegReg > X86_SREG_GS)
10823 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10824
10825 /*
10826 * If rm is denoting a register, no more instruction bytes.
10827 * In that case, the operand size is respected and the upper bits are
10828 * cleared (starting with some Pentium CPUs).
10829 */
10830 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10831 {
10832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10833 switch (pVCpu->iem.s.enmEffOpSize)
10834 {
10835 case IEMMODE_16BIT:
10836 IEM_MC_BEGIN(0, 1);
10837 IEM_MC_LOCAL(uint16_t, u16Value);
10838 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
10839 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
10840 IEM_MC_ADVANCE_RIP();
10841 IEM_MC_END();
10842 break;
10843
10844 case IEMMODE_32BIT:
10845 IEM_MC_BEGIN(0, 1);
10846 IEM_MC_LOCAL(uint32_t, u32Value);
10847 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
10848 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
10849 IEM_MC_ADVANCE_RIP();
10850 IEM_MC_END();
10851 break;
10852
10853 case IEMMODE_64BIT:
10854 IEM_MC_BEGIN(0, 1);
10855 IEM_MC_LOCAL(uint64_t, u64Value);
10856 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
10857 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
10858 IEM_MC_ADVANCE_RIP();
10859 IEM_MC_END();
10860 break;
10861 }
10862 }
10863 else
10864 {
10865 /*
10866 * We're saving the register to memory. The access is word sized
10867 * regardless of operand size prefixes.
10868 */
10869#if 0 /* not necessary */
10870 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
10871#endif
10872 IEM_MC_BEGIN(0, 2);
10873 IEM_MC_LOCAL(uint16_t, u16Value);
10874 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10875 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10876 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10877 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
10878 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
10879 IEM_MC_ADVANCE_RIP();
10880 IEM_MC_END();
10881 }
10882 return VINF_SUCCESS;
10883}
10884
10885
10886
10888/** Opcode 0x8d. */
10889FNIEMOP_DEF(iemOp_lea_Gv_M)
10890{
10891 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
10892 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10893 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10894 return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */
10895
10896 switch (pVCpu->iem.s.enmEffOpSize)
10897 {
10898 case IEMMODE_16BIT:
10899 IEM_MC_BEGIN(0, 2);
10900 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10901 IEM_MC_LOCAL(uint16_t, u16Cast);
10902 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10904 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
10905 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
10906 IEM_MC_ADVANCE_RIP();
10907 IEM_MC_END();
10908 return VINF_SUCCESS;
10909
10910 case IEMMODE_32BIT:
10911 IEM_MC_BEGIN(0, 2);
10912 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10913 IEM_MC_LOCAL(uint32_t, u32Cast);
10914 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10915 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10916 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
10917 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
10918 IEM_MC_ADVANCE_RIP();
10919 IEM_MC_END();
10920 return VINF_SUCCESS;
10921
10922 case IEMMODE_64BIT:
10923 IEM_MC_BEGIN(0, 1);
10924 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10925 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10927 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
10928 IEM_MC_ADVANCE_RIP();
10929 IEM_MC_END();
10930 return VINF_SUCCESS;
10931 }
10932 AssertFailedReturn(VERR_IEM_IPE_7);
10933}
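
/* Example: 'lea eax,[ebx+ebx*4]' computes EAX = EBX * 5 without any memory
 * access, which is also why the register form (mod=3) raises #UD above. */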
10934
10935
10936/** Opcode 0x8e. */
10937FNIEMOP_DEF(iemOp_mov_Sw_Ev)
10938{
10939 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
10940
10941 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10942
10943 /*
10944 * The practical operand size is 16-bit.
10945 */
10946#if 0 /* not necessary */
10947 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
10948#endif
10949
10950 /*
10951 * Check that the destination register exists and can be used with this
10952 * instruction. The REX.R prefix is ignored.
10953 */
10954 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
10955 if ( iSegReg == X86_SREG_CS
10956 || iSegReg > X86_SREG_GS)
10957 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10958
10959 /*
10960 * If rm is denoting a register, no more instruction bytes.
10961 */
10962 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10963 {
10964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10965 IEM_MC_BEGIN(2, 0);
10966 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
10967 IEM_MC_ARG(uint16_t, u16Value, 1);
10968 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10969 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
10970 IEM_MC_END();
10971 }
10972 else
10973 {
10974 /*
10975 * We're loading the register from memory. The access is word sized
10976 * regardless of operand size prefixes.
10977 */
10978 IEM_MC_BEGIN(2, 1);
10979 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
10980 IEM_MC_ARG(uint16_t, u16Value, 1);
10981 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10982 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10984 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10985 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
10986 IEM_MC_END();
10987 }
10988 return VINF_SUCCESS;
10989}
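
/* Note: all architectural side effects of loading a segment register (the
 * descriptor table lookup, #GP/#SS checks, and the one-instruction interrupt
 * shadow after 'mov ss,Ev') are left to iemCImpl_load_SReg. */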
10990
10991
10992/** Opcode 0x8f /0. */
10993FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
10994{
10995 /* This bugger is rather annoying as it requires rSP to be updated before
10996 doing the effective address calculations. Will eventually require a
10997 split between the R/M+SIB decoding and the effective address
10998 calculation - which is something that is required for any attempt at
10999 reusing this code for a recompiler. It may also be good to have if we
11000    need to delay the #UD exception caused by invalid lock prefixes.
11001
11002 For now, we'll do a mostly safe interpreter-only implementation here. */
11003    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignoring it for
11004     *        now until tests show it's checked.. */
11005 IEMOP_MNEMONIC(pop_Ev, "pop Ev");
11006
11007 /* Register access is relatively easy and can share code. */
11008 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11009 return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11010
11011 /*
11012 * Memory target.
11013 *
11014 * Intel says that RSP is incremented before it's used in any effective
11015     * address calculations.  This means some serious extra annoyance here since
11016 * we decode and calculate the effective address in one step and like to
11017 * delay committing registers till everything is done.
11018 *
11019 * So, we'll decode and calculate the effective address twice. This will
11020 * require some recoding if turned into a recompiler.
11021 */
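    /* Example: with RSP=0x1000 in 64-bit mode, 'pop qword [rsp]' loads the
       qword at 0x1000 but stores it at 0x1008 because the effective address
       is calculated with the already incremented RSP; the 2/4/8 bias passed
       to iemOpHlpCalcRmEffAddrEx below accounts for exactly that. */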
11022 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */
11023
11024#ifndef TST_IEM_CHECK_MC
11025 /* Calc effective address with modified ESP. */
11026/** @todo testcase */
11027 PCPUMCTX pCtx = IEM_GET_CTX(pVCpu);
11028 RTGCPTR GCPtrEff;
11029 VBOXSTRICTRC rcStrict;
11030 switch (pVCpu->iem.s.enmEffOpSize)
11031 {
11032 case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
11033 case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
11034 case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
11035 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11036 }
11037 if (rcStrict != VINF_SUCCESS)
11038 return rcStrict;
11039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11040
11041 /* Perform the operation - this should be CImpl. */
11042 RTUINT64U TmpRsp;
11043 TmpRsp.u = pCtx->rsp;
11044 switch (pVCpu->iem.s.enmEffOpSize)
11045 {
11046 case IEMMODE_16BIT:
11047 {
11048 uint16_t u16Value;
11049 rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
11050 if (rcStrict == VINF_SUCCESS)
11051 rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
11052 break;
11053 }
11054
11055 case IEMMODE_32BIT:
11056 {
11057 uint32_t u32Value;
11058 rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
11059 if (rcStrict == VINF_SUCCESS)
11060 rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
11061 break;
11062 }
11063
11064 case IEMMODE_64BIT:
11065 {
11066 uint64_t u64Value;
11067 rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
11068 if (rcStrict == VINF_SUCCESS)
11069 rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
11070 break;
11071 }
11072
11073 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11074 }
11075 if (rcStrict == VINF_SUCCESS)
11076 {
11077 pCtx->rsp = TmpRsp.u;
11078 iemRegUpdateRipAndClearRF(pVCpu);
11079 }
11080 return rcStrict;
11081
11082#else
11083 return VERR_IEM_IPE_2;
11084#endif
11085}
11086
11087
11088/** Opcode 0x8f. */
11089FNIEMOP_DEF(iemOp_Grp1A)
11090{
11091 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11092 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
11093 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
11094
11095    /* AMD has defined /1 thru /7 as XOP prefix (similar to the three-byte VEX). */
11096 /** @todo XOP decoding. */
11097 IEMOP_MNEMONIC(xop_amd, "3-byte-xop");
11098 return IEMOP_RAISE_INVALID_OPCODE();
11099}
11100
11101
11102/**
11103 * Common 'xchg reg,rAX' helper.
11104 */
11105FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
11106{
11107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11108
11109 iReg |= pVCpu->iem.s.uRexB;
11110 switch (pVCpu->iem.s.enmEffOpSize)
11111 {
11112 case IEMMODE_16BIT:
11113 IEM_MC_BEGIN(0, 2);
11114 IEM_MC_LOCAL(uint16_t, u16Tmp1);
11115 IEM_MC_LOCAL(uint16_t, u16Tmp2);
11116 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
11117 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
11118 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
11119 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
11120 IEM_MC_ADVANCE_RIP();
11121 IEM_MC_END();
11122 return VINF_SUCCESS;
11123
11124 case IEMMODE_32BIT:
11125 IEM_MC_BEGIN(0, 2);
11126 IEM_MC_LOCAL(uint32_t, u32Tmp1);
11127 IEM_MC_LOCAL(uint32_t, u32Tmp2);
11128 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
11129 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
11130 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
11131 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
11132 IEM_MC_ADVANCE_RIP();
11133 IEM_MC_END();
11134 return VINF_SUCCESS;
11135
11136 case IEMMODE_64BIT:
11137 IEM_MC_BEGIN(0, 2);
11138 IEM_MC_LOCAL(uint64_t, u64Tmp1);
11139 IEM_MC_LOCAL(uint64_t, u64Tmp2);
11140 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
11141 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
11142 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
11143 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
11144 IEM_MC_ADVANCE_RIP();
11145 IEM_MC_END();
11146 return VINF_SUCCESS;
11147
11148 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11149 }
11150}
11151
11152
11153/** Opcode 0x90. */
11154FNIEMOP_DEF(iemOp_nop)
11155{
11156 /* R8/R8D and RAX/EAX can be exchanged. */
11157 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
11158 {
11159 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
11160 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
11161 }
11162
11163    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ) /* PAUSE is encoded as F3 90. */
11164 IEMOP_MNEMONIC(pause, "pause");
11165 else
11166 IEMOP_MNEMONIC(nop, "nop");
11167 IEM_MC_BEGIN(0, 0);
11168 IEM_MC_ADVANCE_RIP();
11169 IEM_MC_END();
11170 return VINF_SUCCESS;
11171}
11172
11173
11174/** Opcode 0x91. */
11175FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
11176{
11177 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
11178 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
11179}
11180
11181
11182/** Opcode 0x92. */
11183FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
11184{
11185 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
11186 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
11187}
11188
11189
11190/** Opcode 0x93. */
11191FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
11192{
11193 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
11194 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
11195}
11196
11197
11198/** Opcode 0x94. */
11199FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
11200{
11201    IEMOP_MNEMONIC(xchg_rSP_rAX, "xchg rSP,rAX");
11202 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
11203}
11204
11205
11206/** Opcode 0x95. */
11207FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
11208{
11209 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
11210 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
11211}
11212
11213
11214/** Opcode 0x96. */
11215FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
11216{
11217 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
11218 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
11219}
11220
11221
11222/** Opcode 0x97. */
11223FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
11224{
11225 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
11226 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
11227}
11228
11229
11230/** Opcode 0x98. */
11231FNIEMOP_DEF(iemOp_cbw)
11232{
11233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11234 switch (pVCpu->iem.s.enmEffOpSize)
11235 {
11236 case IEMMODE_16BIT:
11237 IEMOP_MNEMONIC(cbw, "cbw");
11238 IEM_MC_BEGIN(0, 1);
11239 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
11240 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
11241 } IEM_MC_ELSE() {
11242 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
11243 } IEM_MC_ENDIF();
11244 IEM_MC_ADVANCE_RIP();
11245 IEM_MC_END();
11246 return VINF_SUCCESS;
11247
11248 case IEMMODE_32BIT:
11249 IEMOP_MNEMONIC(cwde, "cwde");
11250 IEM_MC_BEGIN(0, 1);
11251 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
11252 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
11253 } IEM_MC_ELSE() {
11254 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
11255 } IEM_MC_ENDIF();
11256 IEM_MC_ADVANCE_RIP();
11257 IEM_MC_END();
11258 return VINF_SUCCESS;
11259
11260 case IEMMODE_64BIT:
11261 IEMOP_MNEMONIC(cdqe, "cdqe");
11262 IEM_MC_BEGIN(0, 1);
11263 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
11264 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
11265 } IEM_MC_ELSE() {
11266 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
11267 } IEM_MC_ENDIF();
11268 IEM_MC_ADVANCE_RIP();
11269 IEM_MC_END();
11270 return VINF_SUCCESS;
11271
11272 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11273 }
11274}
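
/* Examples: CBW with AL=80h yields AX=FF80h, CWDE with AX=8000h yields
 * EAX=FFFF8000h, and CDQE with EAX=80000000h yields RAX=FFFFFFFF80000000h,
 * i.e. the OR/AND masks above implement plain sign extension within rAX. */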
11275
11276
11277/** Opcode 0x99. */
11278FNIEMOP_DEF(iemOp_cwd)
11279{
11280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11281 switch (pVCpu->iem.s.enmEffOpSize)
11282 {
11283 case IEMMODE_16BIT:
11284 IEMOP_MNEMONIC(cwd, "cwd");
11285 IEM_MC_BEGIN(0, 1);
11286 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
11287 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
11288 } IEM_MC_ELSE() {
11289 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
11290 } IEM_MC_ENDIF();
11291 IEM_MC_ADVANCE_RIP();
11292 IEM_MC_END();
11293 return VINF_SUCCESS;
11294
11295 case IEMMODE_32BIT:
11296 IEMOP_MNEMONIC(cdq, "cdq");
11297 IEM_MC_BEGIN(0, 1);
11298 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
11299 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
11300 } IEM_MC_ELSE() {
11301 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
11302 } IEM_MC_ENDIF();
11303 IEM_MC_ADVANCE_RIP();
11304 IEM_MC_END();
11305 return VINF_SUCCESS;
11306
11307 case IEMMODE_64BIT:
11308 IEMOP_MNEMONIC(cqo, "cqo");
11309 IEM_MC_BEGIN(0, 1);
11310 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
11311 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
11312 } IEM_MC_ELSE() {
11313 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
11314 } IEM_MC_ENDIF();
11315 IEM_MC_ADVANCE_RIP();
11316 IEM_MC_END();
11317 return VINF_SUCCESS;
11318
11319 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11320 }
11321}
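
/* Example: CWD with AX=8000h sets DX=FFFFh while AX is left untouched,
 * widening the signed value into DX:AX; typically executed right before a
 * signed IDIV. */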
11322
11323
11324/** Opcode 0x9a. */
11325FNIEMOP_DEF(iemOp_call_Ap)
11326{
11327 IEMOP_MNEMONIC(call_Ap, "call Ap");
11328 IEMOP_HLP_NO_64BIT();
11329
11330 /* Decode the far pointer address and pass it on to the far call C implementation. */
11331 uint32_t offSeg;
11332 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
11333 IEM_OPCODE_GET_NEXT_U32(&offSeg);
11334 else
11335 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
11336 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
11337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11338 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
11339}
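
/* Encoding example (16-bit operand size): 9a 78 56 34 12 is
 * 'call 1234h:5678h' - the offset comes first and the selector last,
 * matching the decode order above. */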
11340
11341
11342/** Opcode 0x9b. (aka fwait) */
11343FNIEMOP_DEF(iemOp_wait)
11344{
11345 IEMOP_MNEMONIC(wait, "wait");
11346 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11347
11348 IEM_MC_BEGIN(0, 0);
11349 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11350 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11351 IEM_MC_ADVANCE_RIP();
11352 IEM_MC_END();
11353 return VINF_SUCCESS;
11354}
11355
11356
11357/** Opcode 0x9c. */
11358FNIEMOP_DEF(iemOp_pushf_Fv)
11359{
11360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11361 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11362 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
11363}
11364
11365
11366/** Opcode 0x9d. */
11367FNIEMOP_DEF(iemOp_popf_Fv)
11368{
11369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11370 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11371 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
11372}
11373
11374
11375/** Opcode 0x9e. */
11376FNIEMOP_DEF(iemOp_sahf)
11377{
11378 IEMOP_MNEMONIC(sahf, "sahf");
11379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11380 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
11381 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
11382 return IEMOP_RAISE_INVALID_OPCODE();
11383 IEM_MC_BEGIN(0, 2);
11384 IEM_MC_LOCAL(uint32_t, u32Flags);
11385 IEM_MC_LOCAL(uint32_t, EFlags);
11386 IEM_MC_FETCH_EFLAGS(EFlags);
11387 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
11388 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11389 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
11390 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
11391 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
11392 IEM_MC_COMMIT_EFLAGS(EFlags);
11393 IEM_MC_ADVANCE_RIP();
11394 IEM_MC_END();
11395 return VINF_SUCCESS;
11396}
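
/* Example: SAHF with AH=FFh sets SF, ZF, AF, PF and CF; EFLAGS bits 8 and up
 * are preserved and the reserved bit 1 (X86_EFL_1) is forced to 1 just like
 * on real hardware. */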
11397
11398
11399/** Opcode 0x9f. */
11400FNIEMOP_DEF(iemOp_lahf)
11401{
11402 IEMOP_MNEMONIC(lahf, "lahf");
11403 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11404 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
11405 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
11406 return IEMOP_RAISE_INVALID_OPCODE();
11407 IEM_MC_BEGIN(0, 1);
11408 IEM_MC_LOCAL(uint8_t, u8Flags);
11409 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
11410 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
11411 IEM_MC_ADVANCE_RIP();
11412 IEM_MC_END();
11413 return VINF_SUCCESS;
11414}
11415
11416
11417/**
11418 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
11419 * iemOp_mov_Ov_rAX to fetch the moffsXX part of the opcode and fend off lock
11420 * prefixes.  Will return on failure.
11421 * @param a_GCPtrMemOff The variable to store the offset in.
11422 */
11423#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
11424 do \
11425 { \
11426 switch (pVCpu->iem.s.enmEffAddrMode) \
11427 { \
11428 case IEMMODE_16BIT: \
11429 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
11430 break; \
11431 case IEMMODE_32BIT: \
11432 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
11433 break; \
11434 case IEMMODE_64BIT: \
11435 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
11436 break; \
11437 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11438 } \
11439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11440 } while (0)
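
/* The moffs operand is a plain address-sized immediate with no ModR/M byte;
 * e.g. with 32-bit addressing, a0 44 33 22 11 decodes as 'mov al,[11223344h]'. */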
11441
11442/** Opcode 0xa0. */
11443FNIEMOP_DEF(iemOp_mov_Al_Ob)
11444{
11445 /*
11446     * Get the offset and fend off lock prefixes.
11447 */
11448 RTGCPTR GCPtrMemOff;
11449 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11450
11451 /*
11452 * Fetch AL.
11453 */
11454 IEM_MC_BEGIN(0,1);
11455 IEM_MC_LOCAL(uint8_t, u8Tmp);
11456 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
11457 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
11458 IEM_MC_ADVANCE_RIP();
11459 IEM_MC_END();
11460 return VINF_SUCCESS;
11461}
11462
11463
11464/** Opcode 0xa1. */
11465FNIEMOP_DEF(iemOp_mov_rAX_Ov)
11466{
11467 /*
11468     * Get the offset and fend off lock prefixes.
11469 */
11470 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
11471 RTGCPTR GCPtrMemOff;
11472 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11473
11474 /*
11475 * Fetch rAX.
11476 */
11477 switch (pVCpu->iem.s.enmEffOpSize)
11478 {
11479 case IEMMODE_16BIT:
11480 IEM_MC_BEGIN(0,1);
11481 IEM_MC_LOCAL(uint16_t, u16Tmp);
11482 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
11483 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
11484 IEM_MC_ADVANCE_RIP();
11485 IEM_MC_END();
11486 return VINF_SUCCESS;
11487
11488 case IEMMODE_32BIT:
11489 IEM_MC_BEGIN(0,1);
11490 IEM_MC_LOCAL(uint32_t, u32Tmp);
11491 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
11492 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
11493 IEM_MC_ADVANCE_RIP();
11494 IEM_MC_END();
11495 return VINF_SUCCESS;
11496
11497 case IEMMODE_64BIT:
11498 IEM_MC_BEGIN(0,1);
11499 IEM_MC_LOCAL(uint64_t, u64Tmp);
11500 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
11501 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
11502 IEM_MC_ADVANCE_RIP();
11503 IEM_MC_END();
11504 return VINF_SUCCESS;
11505
11506 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11507 }
11508}
11509
11510
11511/** Opcode 0xa2. */
11512FNIEMOP_DEF(iemOp_mov_Ob_AL)
11513{
11514 /*
11515     * Get the offset and fend off lock prefixes.
11516 */
11517 RTGCPTR GCPtrMemOff;
11518 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11519
11520 /*
11521 * Store AL.
11522 */
11523 IEM_MC_BEGIN(0,1);
11524 IEM_MC_LOCAL(uint8_t, u8Tmp);
11525 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
11526 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
11527 IEM_MC_ADVANCE_RIP();
11528 IEM_MC_END();
11529 return VINF_SUCCESS;
11530}
11531
11532
11533/** Opcode 0xa3. */
11534FNIEMOP_DEF(iemOp_mov_Ov_rAX)
11535{
11536 /*
11537     * Get the offset and fend off lock prefixes.
11538 */
11539 RTGCPTR GCPtrMemOff;
11540 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11541
11542 /*
11543 * Store rAX.
11544 */
11545 switch (pVCpu->iem.s.enmEffOpSize)
11546 {
11547 case IEMMODE_16BIT:
11548 IEM_MC_BEGIN(0,1);
11549 IEM_MC_LOCAL(uint16_t, u16Tmp);
11550 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
11551 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
11552 IEM_MC_ADVANCE_RIP();
11553 IEM_MC_END();
11554 return VINF_SUCCESS;
11555
11556 case IEMMODE_32BIT:
11557 IEM_MC_BEGIN(0,1);
11558 IEM_MC_LOCAL(uint32_t, u32Tmp);
11559 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
11560 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
11561 IEM_MC_ADVANCE_RIP();
11562 IEM_MC_END();
11563 return VINF_SUCCESS;
11564
11565 case IEMMODE_64BIT:
11566 IEM_MC_BEGIN(0,1);
11567 IEM_MC_LOCAL(uint64_t, u64Tmp);
11568 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
11569 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
11570 IEM_MC_ADVANCE_RIP();
11571 IEM_MC_END();
11572 return VINF_SUCCESS;
11573
11574 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11575 }
11576}
11577
11578/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv */
11579#define IEM_MOVS_CASE(ValBits, AddrBits) \
11580 IEM_MC_BEGIN(0, 2); \
11581 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
11582 IEM_MC_LOCAL(RTGCPTR, uAddr); \
11583 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
11584 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
11585 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
11586 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
11587 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
11588 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11589 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11590 } IEM_MC_ELSE() { \
11591 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11592 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11593 } IEM_MC_ENDIF(); \
11594 IEM_MC_ADVANCE_RIP(); \
11595 IEM_MC_END();
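
/* Plain C sketch of one non-REP MOVS iteration, i.e. what the macro above
 * generates (assuming flat memory and no faults, for illustration only):
 *
 *     uValue = *pSrc;                      // read [rSI] using iEffSeg
 *     *pDst  = uValue;                     // write ES:[rDI], not overridable
 *     delta  = (fEFlags & X86_EFL_DF) ? -cb : +cb;
 *     rSI   += delta;                      // step both pointers by the
 *     rDI   += delta;                      // operand size
 */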
11596
11597/** Opcode 0xa4. */
11598FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
11599{
11600 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11601
11602 /*
11603 * Use the C implementation if a repeat prefix is encountered.
11604 */
11605 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11606 {
11607 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
11608 switch (pVCpu->iem.s.enmEffAddrMode)
11609 {
11610 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
11611 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
11612 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
11613 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11614 }
11615 }
11616 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
11617
11618 /*
11619 * Sharing case implementation with movs[wdq] below.
11620 */
11621 switch (pVCpu->iem.s.enmEffAddrMode)
11622 {
11623 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
11624 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
11625 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
11626 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11627 }
11628 return VINF_SUCCESS;
11629}
11630
11631
11632/** Opcode 0xa5. */
11633FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
11634{
11635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11636
11637 /*
11638 * Use the C implementation if a repeat prefix is encountered.
11639 */
11640 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11641 {
11642 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
11643 switch (pVCpu->iem.s.enmEffOpSize)
11644 {
11645 case IEMMODE_16BIT:
11646 switch (pVCpu->iem.s.enmEffAddrMode)
11647 {
11648 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
11649 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
11650 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
11651 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11652 }
11653 break;
11654 case IEMMODE_32BIT:
11655 switch (pVCpu->iem.s.enmEffAddrMode)
11656 {
11657 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
11658 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
11659 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
11660 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11661 }
11662 case IEMMODE_64BIT:
11663 switch (pVCpu->iem.s.enmEffAddrMode)
11664 {
11665 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
11666 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
11667 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
11668 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11669 }
11670 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11671 }
11672 }
11673 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
11674
11675 /*
11676 * Annoying double switch here.
11677 * Using ugly macro for implementing the cases, sharing it with movsb.
11678 */
11679 switch (pVCpu->iem.s.enmEffOpSize)
11680 {
11681 case IEMMODE_16BIT:
11682 switch (pVCpu->iem.s.enmEffAddrMode)
11683 {
11684 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
11685 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
11686 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
11687 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11688 }
11689 break;
11690
11691 case IEMMODE_32BIT:
11692 switch (pVCpu->iem.s.enmEffAddrMode)
11693 {
11694 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
11695 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
11696 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
11697 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11698 }
11699 break;
11700
11701 case IEMMODE_64BIT:
11702 switch (pVCpu->iem.s.enmEffAddrMode)
11703 {
11704 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11705 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
11706 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
11707 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11708 }
11709 break;
11710 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11711 }
11712 return VINF_SUCCESS;
11713}
11714
11715#undef IEM_MOVS_CASE
11716
11717/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv */
11718#define IEM_CMPS_CASE(ValBits, AddrBits) \
11719 IEM_MC_BEGIN(3, 3); \
11720 IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
11721 IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
11722 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11723 IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
11724 IEM_MC_LOCAL(RTGCPTR, uAddr); \
11725 \
11726 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
11727 IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
11728 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
11729 IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
11730 IEM_MC_REF_LOCAL(puValue1, uValue1); \
11731 IEM_MC_REF_EFLAGS(pEFlags); \
11732 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
11733 \
11734 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
11735 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11736 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11737 } IEM_MC_ELSE() { \
11738 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11739 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11740 } IEM_MC_ENDIF(); \
11741 IEM_MC_ADVANCE_RIP(); \
11742        IEM_MC_END();
11743
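/* Unlike MOVS/STOS/LODS, the repeated forms of CMPS (and SCAS) come in two
 * flavours: REPE/REPZ (F3) stops once ZF is clear, while REPNE/REPNZ (F2)
 * stops once ZF is set, hence the separate repe/repne C implementations
 * below. */
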
11744/** Opcode 0xa6. */
11745FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
11746{
11747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11748
11749 /*
11750 * Use the C implementation if a repeat prefix is encountered.
11751 */
11752 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
11753 {
11754 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
11755 switch (pVCpu->iem.s.enmEffAddrMode)
11756 {
11757 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
11758 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
11759 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
11760 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11761 }
11762 }
11763 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
11764 {
11765 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
11766 switch (pVCpu->iem.s.enmEffAddrMode)
11767 {
11768 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
11769 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
11770 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
11771 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11772 }
11773 }
11774 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
11775
11776 /*
11777 * Sharing case implementation with cmps[wdq] below.
11778 */
11779 switch (pVCpu->iem.s.enmEffAddrMode)
11780 {
11781 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
11782 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
11783 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
11784 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11785 }
11786 return VINF_SUCCESS;
11788}
11789
11790
11791/** Opcode 0xa7. */
11792FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
11793{
11794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11795
11796 /*
11797 * Use the C implementation if a repeat prefix is encountered.
11798 */
11799 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
11800 {
11801 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
11802 switch (pVCpu->iem.s.enmEffOpSize)
11803 {
11804 case IEMMODE_16BIT:
11805 switch (pVCpu->iem.s.enmEffAddrMode)
11806 {
11807 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
11808 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
11809 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
11810 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11811 }
11812 break;
11813 case IEMMODE_32BIT:
11814 switch (pVCpu->iem.s.enmEffAddrMode)
11815 {
11816 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
11817 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
11818 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
11819 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11820 }
11821 case IEMMODE_64BIT:
11822 switch (pVCpu->iem.s.enmEffAddrMode)
11823 {
11824 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
11825 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
11826 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
11827 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11828 }
11829 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11830 }
11831 }
11832
11833 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
11834 {
11835 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
11836 switch (pVCpu->iem.s.enmEffOpSize)
11837 {
11838 case IEMMODE_16BIT:
11839 switch (pVCpu->iem.s.enmEffAddrMode)
11840 {
11841 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
11842 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
11843 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
11844 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11845 }
11846 break;
11847 case IEMMODE_32BIT:
11848 switch (pVCpu->iem.s.enmEffAddrMode)
11849 {
11850 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
11851 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
11852 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
11853 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11854 }
11855 case IEMMODE_64BIT:
11856 switch (pVCpu->iem.s.enmEffAddrMode)
11857 {
11858 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
11859 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
11860 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
11861 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11862 }
11863 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11864 }
11865 }
11866
11867 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
11868
11869 /*
11870 * Annoying double switch here.
11871 * Using ugly macro for implementing the cases, sharing it with cmpsb.
11872 */
11873 switch (pVCpu->iem.s.enmEffOpSize)
11874 {
11875 case IEMMODE_16BIT:
11876 switch (pVCpu->iem.s.enmEffAddrMode)
11877 {
11878 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
11879 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
11880 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
11881 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11882 }
11883 break;
11884
11885 case IEMMODE_32BIT:
11886 switch (pVCpu->iem.s.enmEffAddrMode)
11887 {
11888 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
11889 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
11890 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
11891 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11892 }
11893 break;
11894
11895 case IEMMODE_64BIT:
11896 switch (pVCpu->iem.s.enmEffAddrMode)
11897 {
11898 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11899 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
11900 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
11901 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11902 }
11903 break;
11904 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11905 }
11906 return VINF_SUCCESS;
11908}
11909
11910#undef IEM_CMPS_CASE
11911
11912/** Opcode 0xa8. */
11913FNIEMOP_DEF(iemOp_test_AL_Ib)
11914{
11915 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
11916 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
11917 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
11918}
11919
11920
11921/** Opcode 0xa9. */
11922FNIEMOP_DEF(iemOp_test_eAX_Iz)
11923{
11924 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
11925 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
11926 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
11927}
11928
11929
11930/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX */
11931#define IEM_STOS_CASE(ValBits, AddrBits) \
11932 IEM_MC_BEGIN(0, 2); \
11933 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
11934 IEM_MC_LOCAL(RTGCPTR, uAddr); \
11935 IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
11936 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
11937 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
11938 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
11939 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11940 } IEM_MC_ELSE() { \
11941 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11942 } IEM_MC_ENDIF(); \
11943 IEM_MC_ADVANCE_RIP(); \
11944        IEM_MC_END();
11945
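/* REP STOS is the classic memset idiom: with rCX holding the count, ES:rDI
 * the destination and AL/AX/EAX/RAX the fill value, 'rep stosb' stores the
 * value rCX times (ascending when EFLAGS.DF is clear). */
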
11946/** Opcode 0xaa. */
11947FNIEMOP_DEF(iemOp_stosb_Yb_AL)
11948{
11949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11950
11951 /*
11952 * Use the C implementation if a repeat prefix is encountered.
11953 */
11954 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11955 {
11956 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
11957 switch (pVCpu->iem.s.enmEffAddrMode)
11958 {
11959 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
11960 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
11961 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
11962 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11963 }
11964 }
11965 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
11966
11967 /*
11968 * Sharing case implementation with stos[wdq] below.
11969 */
11970 switch (pVCpu->iem.s.enmEffAddrMode)
11971 {
11972 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
11973 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
11974 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
11975 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11976 }
11977 return VINF_SUCCESS;
11978}
11979
11980
11981/** Opcode 0xab. */
11982FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
11983{
11984 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11985
11986 /*
11987 * Use the C implementation if a repeat prefix is encountered.
11988 */
11989 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11990 {
11991 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
11992 switch (pVCpu->iem.s.enmEffOpSize)
11993 {
11994 case IEMMODE_16BIT:
11995 switch (pVCpu->iem.s.enmEffAddrMode)
11996 {
11997 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
11998 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
11999 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
12000 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12001 }
12002 break;
12003 case IEMMODE_32BIT:
12004 switch (pVCpu->iem.s.enmEffAddrMode)
12005 {
12006 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
12007 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
12008 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
12009 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12010 }
12011 case IEMMODE_64BIT:
12012 switch (pVCpu->iem.s.enmEffAddrMode)
12013 {
12014 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
12015 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
12016 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
12017 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12018 }
12019 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12020 }
12021 }
12022 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
12023
12024 /*
12025 * Annoying double switch here.
12026 * Using ugly macro for implementing the cases, sharing it with stosb.
12027 */
12028 switch (pVCpu->iem.s.enmEffOpSize)
12029 {
12030 case IEMMODE_16BIT:
12031 switch (pVCpu->iem.s.enmEffAddrMode)
12032 {
12033 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
12034 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
12035 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
12036 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12037 }
12038 break;
12039
12040 case IEMMODE_32BIT:
12041 switch (pVCpu->iem.s.enmEffAddrMode)
12042 {
12043 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
12044 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
12045 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
12046 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12047 }
12048 break;
12049
12050 case IEMMODE_64BIT:
12051 switch (pVCpu->iem.s.enmEffAddrMode)
12052 {
12053 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12054 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
12055 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
12056 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12057 }
12058 break;
12059 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12060 }
12061 return VINF_SUCCESS;
12062}
12063
12064#undef IEM_STOS_CASE
12065
12066/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv */
12067#define IEM_LODS_CASE(ValBits, AddrBits) \
12068 IEM_MC_BEGIN(0, 2); \
12069 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
12070 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12071 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
12072 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
12073 IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
12074 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12075 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12076 } IEM_MC_ELSE() { \
12077 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12078 } IEM_MC_ENDIF(); \
12079 IEM_MC_ADVANCE_RIP(); \
12080 IEM_MC_END();
12081
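/* Note: LODS has no ES-forced destination; it merely loads [rSI] (default DS,
 * overridable) into AL/AX/EAX/RAX and steps rSI. A REP prefix is legal but
 * rarely useful, as only the last value loaded survives. */
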
12082/** Opcode 0xac. */
12083FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
12084{
12085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12086
12087 /*
12088 * Use the C implementation if a repeat prefix is encountered.
12089 */
12090 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12091 {
12092 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
12093 switch (pVCpu->iem.s.enmEffAddrMode)
12094 {
12095 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
12096 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
12097 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
12098 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12099 }
12100 }
12101 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
12102
12103 /*
12104     * Sharing case implementation with lods[wdq] below.
12105 */
12106 switch (pVCpu->iem.s.enmEffAddrMode)
12107 {
12108 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
12109 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
12110 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
12111 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12112 }
12113 return VINF_SUCCESS;
12114}
12115
12116
12117/** Opcode 0xad. */
12118FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
12119{
12120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12121
12122 /*
12123 * Use the C implementation if a repeat prefix is encountered.
12124 */
12125 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12126 {
12127 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
12128 switch (pVCpu->iem.s.enmEffOpSize)
12129 {
12130 case IEMMODE_16BIT:
12131 switch (pVCpu->iem.s.enmEffAddrMode)
12132 {
12133 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
12134 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
12135 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
12136 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12137 }
12138 break;
12139 case IEMMODE_32BIT:
12140 switch (pVCpu->iem.s.enmEffAddrMode)
12141 {
12142 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
12143 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
12144 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
12145 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12146 }
12147 case IEMMODE_64BIT:
12148 switch (pVCpu->iem.s.enmEffAddrMode)
12149 {
12150 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
12151 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
12152 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
12153 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12154 }
12155 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12156 }
12157 }
12158 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
12159
12160 /*
12161 * Annoying double switch here.
12162 * Using ugly macro for implementing the cases, sharing it with lodsb.
12163 */
12164 switch (pVCpu->iem.s.enmEffOpSize)
12165 {
12166 case IEMMODE_16BIT:
12167 switch (pVCpu->iem.s.enmEffAddrMode)
12168 {
12169 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
12170 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
12171 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
12172 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12173 }
12174 break;
12175
12176 case IEMMODE_32BIT:
12177 switch (pVCpu->iem.s.enmEffAddrMode)
12178 {
12179 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
12180 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
12181 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
12182 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12183 }
12184 break;
12185
12186 case IEMMODE_64BIT:
12187 switch (pVCpu->iem.s.enmEffAddrMode)
12188 {
12189 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12190 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
12191 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
12192 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12193 }
12194 break;
12195 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12196 }
12197 return VINF_SUCCESS;
12198}
12199
12200#undef IEM_LODS_CASE
12201
12202/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv */
12203#define IEM_SCAS_CASE(ValBits, AddrBits) \
12204 IEM_MC_BEGIN(3, 2); \
12205 IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
12206 IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
12207 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
12208 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12209 \
12210 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12211 IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
12212 IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
12213 IEM_MC_REF_EFLAGS(pEFlags); \
12214 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
12215 \
12216 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12217 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12218 } IEM_MC_ELSE() { \
12219 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12220 } IEM_MC_ENDIF(); \
12221 IEM_MC_ADVANCE_RIP(); \
12222 IEM_MC_END();
12223
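/*
 * Editor's illustrative sketch (not IEM code) of one non-REP SCASB step per
 * IEM_SCAS_CASE(8, 64) above: compare AL against ES:[rDI], update the flags
 * and step rDI by the direction flag.  Only ZF is derived here for brevity;
 * the real iemAImpl_cmp_u8 sets CF/PF/AF/SF/OF too.  Names are hypothetical.
 */
#include <stdint.h>
static uint32_t scasDemoStep8(uint8_t bAl, uint8_t const *pbEsBase, uint64_t *puRdi, uint32_t fEFlags)
{
    uint8_t const bMem = pbEsBase[*puRdi];
    fEFlags &= ~(UINT32_C(1) << 6);                            /* clear ZF (bit 6) */
    if ((uint8_t)(bAl - bMem) == 0)
        fEFlags |= UINT32_C(1) << 6;                           /* set ZF on a match */
    *puRdi += (fEFlags & (UINT32_C(1) << 10)) ? (uint64_t)0 - 1 : 1; /* DF (bit 10) */
    return fEFlags;
}
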
12224/** Opcode 0xae. */
12225FNIEMOP_DEF(iemOp_scasb_AL_Xb)
12226{
12227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12228
12229 /*
12230 * Use the C implementation if a repeat prefix is encountered.
12231 */
12232 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12233 {
12234 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
12235 switch (pVCpu->iem.s.enmEffAddrMode)
12236 {
12237 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
12238 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
12239 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
12240 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12241 }
12242 }
12243 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12244 {
12245         IEMOP_MNEMONIC(repne_scasb_AL_Xb, "repne scasb AL,Xb");
12246 switch (pVCpu->iem.s.enmEffAddrMode)
12247 {
12248 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
12249 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
12250 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
12251 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12252 }
12253 }
12254 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
12255
12256 /*
12257      * Sharing case implementation with scas[wdq] below.
12258 */
12259 switch (pVCpu->iem.s.enmEffAddrMode)
12260 {
12261 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
12262 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
12263 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
12264 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12265 }
12266 return VINF_SUCCESS;
12267}
12268
12269
12270/** Opcode 0xaf. */
12271FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
12272{
12273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12274
12275 /*
12276 * Use the C implementation if a repeat prefix is encountered.
12277 */
12278 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12279 {
12280 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
12281 switch (pVCpu->iem.s.enmEffOpSize)
12282 {
12283 case IEMMODE_16BIT:
12284 switch (pVCpu->iem.s.enmEffAddrMode)
12285 {
12286 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
12287 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
12288 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
12289 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12290 }
12291 break;
12292 case IEMMODE_32BIT:
12293 switch (pVCpu->iem.s.enmEffAddrMode)
12294 {
12295 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
12296 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
12297 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
12298 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12299                 }
                      break;
12300 case IEMMODE_64BIT:
12301 switch (pVCpu->iem.s.enmEffAddrMode)
12302 {
12303                     case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Is this wrong? We can do 16-bit addressing in 64-bit mode, but not 32-bit, right? */
12304 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
12305 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
12306 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12307 }
12308 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12309 }
12310 }
12311 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12312 {
12313 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
12314 switch (pVCpu->iem.s.enmEffOpSize)
12315 {
12316 case IEMMODE_16BIT:
12317 switch (pVCpu->iem.s.enmEffAddrMode)
12318 {
12319 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
12320 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
12321 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
12322 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12323 }
12324 break;
12325 case IEMMODE_32BIT:
12326 switch (pVCpu->iem.s.enmEffAddrMode)
12327 {
12328 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
12329 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
12330 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
12331 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12332                 }
                      break;
12333 case IEMMODE_64BIT:
12334 switch (pVCpu->iem.s.enmEffAddrMode)
12335 {
12336 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
12337 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
12338 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
12339 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12340 }
12341 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12342 }
12343 }
12344 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
12345
12346 /*
12347 * Annoying double switch here.
12348 * Using ugly macro for implementing the cases, sharing it with scasb.
12349 */
12350 switch (pVCpu->iem.s.enmEffOpSize)
12351 {
12352 case IEMMODE_16BIT:
12353 switch (pVCpu->iem.s.enmEffAddrMode)
12354 {
12355 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
12356 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
12357 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
12358 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12359 }
12360 break;
12361
12362 case IEMMODE_32BIT:
12363 switch (pVCpu->iem.s.enmEffAddrMode)
12364 {
12365 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
12366 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
12367 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
12368 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12369 }
12370 break;
12371
12372 case IEMMODE_64BIT:
12373 switch (pVCpu->iem.s.enmEffAddrMode)
12374 {
12375 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12376 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
12377 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
12378 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12379 }
12380 break;
12381 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12382 }
12383 return VINF_SUCCESS;
12384}
12385
12386#undef IEM_SCAS_CASE
12387
12388/**
12389 * Common 'mov r8, imm8' helper.
12390 */
12391FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
12392{
12393 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12395
12396 IEM_MC_BEGIN(0, 1);
12397 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
12398 IEM_MC_STORE_GREG_U8(iReg, u8Value);
12399 IEM_MC_ADVANCE_RIP();
12400 IEM_MC_END();
12401
12402 return VINF_SUCCESS;
12403}
12404
12405
12406/** Opcode 0xb0. */
12407FNIEMOP_DEF(iemOp_mov_AL_Ib)
12408{
12409 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
12410 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12411}
12412
12413
12414/** Opcode 0xb1. */
12415FNIEMOP_DEF(iemOp_CL_Ib)
12416{
12417 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
12418 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12419}
12420
12421
12422/** Opcode 0xb2. */
12423FNIEMOP_DEF(iemOp_DL_Ib)
12424{
12425 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
12426 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12427}
12428
12429
12430/** Opcode 0xb3. */
12431FNIEMOP_DEF(iemOp_BL_Ib)
12432{
12433 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
12434 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12435}
12436
12437
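/* Editor's note: without a REX prefix, register index 4 decodes as AH, while
   with a REX prefix it decodes as SPL (or R12B when REX.B is set).  That is
   why X86_GREG_xSP is used below with uRexB merged in; the same AH/SPL-style
   pairing applies to the CH, DH and BH handlers that follow. */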
12438/** Opcode 0xb4. */
12439FNIEMOP_DEF(iemOp_mov_AH_Ib)
12440{
12441 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
12442 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12443}
12444
12445
12446/** Opcode 0xb5. */
12447FNIEMOP_DEF(iemOp_CH_Ib)
12448{
12449 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
12450 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12451}
12452
12453
12454/** Opcode 0xb6. */
12455FNIEMOP_DEF(iemOp_DH_Ib)
12456{
12457 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
12458 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12459}
12460
12461
12462/** Opcode 0xb7. */
12463FNIEMOP_DEF(iemOp_BH_Ib)
12464{
12465 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
12466 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12467}
12468
12469
12470/**
12471 * Common 'mov regX,immX' helper.
12472 */
12473FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
12474{
12475 switch (pVCpu->iem.s.enmEffOpSize)
12476 {
12477 case IEMMODE_16BIT:
12478 {
12479 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12481
12482 IEM_MC_BEGIN(0, 1);
12483 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
12484 IEM_MC_STORE_GREG_U16(iReg, u16Value);
12485 IEM_MC_ADVANCE_RIP();
12486 IEM_MC_END();
12487 break;
12488 }
12489
12490 case IEMMODE_32BIT:
12491 {
12492 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12494
12495 IEM_MC_BEGIN(0, 1);
12496 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
12497 IEM_MC_STORE_GREG_U32(iReg, u32Value);
12498 IEM_MC_ADVANCE_RIP();
12499 IEM_MC_END();
12500 break;
12501 }
12502 case IEMMODE_64BIT:
12503 {
12504 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
12505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12506
12507 IEM_MC_BEGIN(0, 1);
12508 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
12509 IEM_MC_STORE_GREG_U64(iReg, u64Value);
12510 IEM_MC_ADVANCE_RIP();
12511 IEM_MC_END();
12512 break;
12513 }
12514 }
12515
12516 return VINF_SUCCESS;
12517}
12518
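/*
 * Editor's illustrative note on the IEM_OPCODE_GET_NEXT_U64 fetch above:
 * with REX.W, opcode 0xb8+r is the one MOV form carrying a full 64-bit
 * immediate ("movabs" in some assemblers).  The bytes below encode
 * mov rax, 0x1122334455667788: 0x48 is REX.W, 0xb8 selects rAX, and the
 * immediate follows little-endian.
 */
#include <stdint.h>
static uint8_t const g_abMovRaxImm64Demo[] =
{
    0x48, 0xb8, 0x88, 0x77, 0x66, 0x55, 0x44, 0x33, 0x22, 0x11
};
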
12519
12520/** Opcode 0xb8. */
12521FNIEMOP_DEF(iemOp_eAX_Iv)
12522{
12523     IEMOP_MNEMONIC(mov_rAX_Iv, "mov rAX,Iv");
12524 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12525}
12526
12527
12528/** Opcode 0xb9. */
12529FNIEMOP_DEF(iemOp_eCX_Iv)
12530{
12531     IEMOP_MNEMONIC(mov_rCX_Iv, "mov rCX,Iv");
12532 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12533}
12534
12535
12536/** Opcode 0xba. */
12537FNIEMOP_DEF(iemOp_eDX_Iv)
12538{
12539     IEMOP_MNEMONIC(mov_rDX_Iv, "mov rDX,Iv");
12540 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12541}
12542
12543
12544/** Opcode 0xbb. */
12545FNIEMOP_DEF(iemOp_eBX_Iv)
12546{
12547     IEMOP_MNEMONIC(mov_rBX_Iv, "mov rBX,Iv");
12548 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12549}
12550
12551
12552/** Opcode 0xbc. */
12553FNIEMOP_DEF(iemOp_eSP_Iv)
12554{
12555     IEMOP_MNEMONIC(mov_rSP_Iv, "mov rSP,Iv");
12556 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12557}
12558
12559
12560/** Opcode 0xbd. */
12561FNIEMOP_DEF(iemOp_eBP_Iv)
12562{
12563     IEMOP_MNEMONIC(mov_rBP_Iv, "mov rBP,Iv");
12564 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12565}
12566
12567
12568/** Opcode 0xbe. */
12569FNIEMOP_DEF(iemOp_eSI_Iv)
12570{
12571     IEMOP_MNEMONIC(mov_rSI_Iv, "mov rSI,Iv");
12572 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12573}
12574
12575
12576/** Opcode 0xbf. */
12577FNIEMOP_DEF(iemOp_eDI_Iv)
12578{
12579     IEMOP_MNEMONIC(mov_rDI_Iv, "mov rDI,Iv");
12580 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12581}
12582
12583
12584/** Opcode 0xc0. */
12585FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
12586{
12587 IEMOP_HLP_MIN_186();
12588 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12589 PCIEMOPSHIFTSIZES pImpl;
12590 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
12591 {
12592 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
12593 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
12594 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
12595 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
12596 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
12597 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
12598 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
12599 case 6: return IEMOP_RAISE_INVALID_OPCODE();
12600 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
12601 }
12602 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
12603
12604 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12605 {
12606 /* register */
12607 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12609 IEM_MC_BEGIN(3, 0);
12610 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
12611 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
12612 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12613 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
12614 IEM_MC_REF_EFLAGS(pEFlags);
12615 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
12616 IEM_MC_ADVANCE_RIP();
12617 IEM_MC_END();
12618 }
12619 else
12620 {
12621 /* memory */
12622 IEM_MC_BEGIN(3, 2);
12623 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
12624 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12625 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12626 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12627
12628 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12629 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12630 IEM_MC_ASSIGN(cShiftArg, cShift);
12631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12632 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12633 IEM_MC_FETCH_EFLAGS(EFlags);
12634 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
12635
12636 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
12637 IEM_MC_COMMIT_EFLAGS(EFlags);
12638 IEM_MC_ADVANCE_RIP();
12639 IEM_MC_END();
12640 }
12641 return VINF_SUCCESS;
12642}
12643
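/*
 * Editor's illustrative sketch of the group-2 dispatch above: the ModRM reg
 * field selects the operation rather than a register, so e.g. the sequence
 * c0 e8 05 decodes as mod=3, reg=5 (SHR), rm=0 (AL), i.e. "shr al, 5".
 * Plain shifts are used here instead of the IEM masks and macros.
 */
#include <stdint.h>
static void grp2DemoDecode(uint8_t bRm, unsigned *puMod, unsigned *puReg, unsigned *puRm)
{
    *puMod = (bRm >> 6) & 3; /* 3 means a register operand */
    *puReg = (bRm >> 3) & 7; /* 0=rol 1=ror 2=rcl 3=rcr 4=shl 5=shr 6=#UD 7=sar */
    *puRm  =  bRm       & 7;
}
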
12644
12645/** Opcode 0xc1. */
12646FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
12647{
12648 IEMOP_HLP_MIN_186();
12649 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12650 PCIEMOPSHIFTSIZES pImpl;
12651 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
12652 {
12653 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
12654 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
12655 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
12656 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
12657 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
12658 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
12659 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
12660 case 6: return IEMOP_RAISE_INVALID_OPCODE();
12661 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
12662 }
12663 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
12664
12665 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12666 {
12667 /* register */
12668 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12670 switch (pVCpu->iem.s.enmEffOpSize)
12671 {
12672 case IEMMODE_16BIT:
12673 IEM_MC_BEGIN(3, 0);
12674 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12675 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
12676 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12677 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
12678 IEM_MC_REF_EFLAGS(pEFlags);
12679 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
12680 IEM_MC_ADVANCE_RIP();
12681 IEM_MC_END();
12682 return VINF_SUCCESS;
12683
12684 case IEMMODE_32BIT:
12685 IEM_MC_BEGIN(3, 0);
12686 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12687 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
12688 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12689 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
12690 IEM_MC_REF_EFLAGS(pEFlags);
12691 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
12692 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12693 IEM_MC_ADVANCE_RIP();
12694 IEM_MC_END();
12695 return VINF_SUCCESS;
12696
12697 case IEMMODE_64BIT:
12698 IEM_MC_BEGIN(3, 0);
12699 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12700 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
12701 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12702 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
12703 IEM_MC_REF_EFLAGS(pEFlags);
12704 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
12705 IEM_MC_ADVANCE_RIP();
12706 IEM_MC_END();
12707 return VINF_SUCCESS;
12708
12709 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12710 }
12711 }
12712 else
12713 {
12714 /* memory */
12715 switch (pVCpu->iem.s.enmEffOpSize)
12716 {
12717 case IEMMODE_16BIT:
12718 IEM_MC_BEGIN(3, 2);
12719 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12720 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12721 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12722 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12723
12724 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12725 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12726 IEM_MC_ASSIGN(cShiftArg, cShift);
12727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12728 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12729 IEM_MC_FETCH_EFLAGS(EFlags);
12730 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
12731
12732 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
12733 IEM_MC_COMMIT_EFLAGS(EFlags);
12734 IEM_MC_ADVANCE_RIP();
12735 IEM_MC_END();
12736 return VINF_SUCCESS;
12737
12738 case IEMMODE_32BIT:
12739 IEM_MC_BEGIN(3, 2);
12740 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12741 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12742 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12743 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12744
12745 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12746 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12747 IEM_MC_ASSIGN(cShiftArg, cShift);
12748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12749 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12750 IEM_MC_FETCH_EFLAGS(EFlags);
12751 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
12752
12753 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
12754 IEM_MC_COMMIT_EFLAGS(EFlags);
12755 IEM_MC_ADVANCE_RIP();
12756 IEM_MC_END();
12757 return VINF_SUCCESS;
12758
12759 case IEMMODE_64BIT:
12760 IEM_MC_BEGIN(3, 2);
12761 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12762 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12763 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12764 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12765
12766 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12767 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12768 IEM_MC_ASSIGN(cShiftArg, cShift);
12769 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12770 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12771 IEM_MC_FETCH_EFLAGS(EFlags);
12772 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
12773
12774 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
12775 IEM_MC_COMMIT_EFLAGS(EFlags);
12776 IEM_MC_ADVANCE_RIP();
12777 IEM_MC_END();
12778 return VINF_SUCCESS;
12779
12780 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12781 }
12782 }
12783}
12784
12785
12786/** Opcode 0xc2. */
12787FNIEMOP_DEF(iemOp_retn_Iw)
12788{
12789 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
12790 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12791 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12792 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12793 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
12794}
12795
12796
12797/** Opcode 0xc3. */
12798FNIEMOP_DEF(iemOp_retn)
12799{
12800 IEMOP_MNEMONIC(retn, "retn");
12801 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12803 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
12804}
12805
12806
12807/** Opcode 0xc4. */
12808FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
12809{
12810 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12811 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
12812 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12813 {
12814         IEMOP_MNEMONIC(vex3_prefix, "3-byte-vex");
12815         /* The LES instruction is invalid in 64-bit mode.  In legacy and
12816            compatibility mode it is invalid with MOD=3.
12817            The use as the lead byte of the 3-byte VEX prefix is made possible
12818            by assigning the inverted REX.R and REX.X to the two MOD bits:
12819            32-bit code cannot address registers 8..15, so both bits are always
12820            set and the byte following 0xc4 always reads as MOD=3 there. */
12821 /** @todo VEX: Just use new tables for it. */
12822 return IEMOP_RAISE_INVALID_OPCODE();
12823 }
12824 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
12825 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
12826}
12827
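/*
 * Editor's illustrative sketch of the disambiguation described above:
 * outside 64-bit mode the byte following 0xc4 is treated as a VEX payload
 * byte only when its two top (MOD) bits are set, because those bits hold
 * inverted register-extension bits that valid 32-bit encodings cannot clear.
 */
#include <stdbool.h>
#include <stdint.h>
static bool vexDemoIsVexForm(bool fIs64BitMode, uint8_t bByteAfterC4)
{
    return fIs64BitMode || (bByteAfterC4 & 0xc0) == 0xc0; /* MOD == 3 */
}
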
12828
12829/** Opcode 0xc5. */
12830FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
12831{
12832     /* The LDS instruction is invalid in 64-bit mode.  In legacy and
12833        compatibility mode it is invalid with MOD=3.  The use as the 2-byte
12834        VEX prefix works by assigning the inverted REX.R to the top MOD bit
12835        and the top bit of the inverted register specifier (vvvv) to the
12836        bottom MOD bit.  VEX is not available in real or v86 mode. */
12837 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12838 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
12839 {
12840 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
12841 {
12842 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
12843 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
12844 }
12845 IEMOP_HLP_NO_REAL_OR_V86_MODE();
12846 }
12847
12848     IEMOP_MNEMONIC(vex2_prefix, "2-byte-vex");
12849     /** @todo Test when exactly the VEX conformance checks kick in during
12850 * instruction decoding and fetching (using \#PF). */
12851 uint8_t bVex1; IEM_OPCODE_GET_NEXT_U8(&bVex1);
12853 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
12854#if 0 /* will make sense of this next week... */
12855     if (   !(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
12856 &&
12857 )
12858 {
12859
12860 }
12861#endif
12862
12863 /** @todo VEX: Just use new tables for it. */
12864 return IEMOP_RAISE_INVALID_OPCODE();
12865}
12866
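/*
 * Editor's illustrative sketch of the single payload byte of the 2-byte VEX
 * form discussed above: R and the vvvv register specifier are stored
 * inverted, L and pp are not.  The names here are hypothetical.
 */
#include <stdbool.h>
#include <stdint.h>
typedef struct VEX2DEMOFIELDS { bool fRexR; uint8_t uVvvv; bool f256; uint8_t uPp; } VEX2DEMOFIELDS;
static VEX2DEMOFIELDS vex2DemoDecodePayload(uint8_t bVex1)
{
    VEX2DEMOFIELDS Fields;
    Fields.fRexR = !(bVex1 & 0x80);                  /* stored inverted */
    Fields.uVvvv = (uint8_t)((~bVex1 >> 3) & 0xf);   /* extra operand, stored inverted */
    Fields.f256  = (bVex1 & 0x04) != 0;              /* VEX.L: 256-bit when set */
    Fields.uPp   = bVex1 & 0x03;                     /* 0=none 1=0x66 2=0xf3 3=0xf2 */
    return Fields;
}
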
12867
12868/** Opcode 0xc6. */
12869FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
12870{
12871 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12872 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
12873 return IEMOP_RAISE_INVALID_OPCODE();
12874 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
12875
12876 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12877 {
12878 /* register access */
12879 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12881 IEM_MC_BEGIN(0, 0);
12882 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
12883 IEM_MC_ADVANCE_RIP();
12884 IEM_MC_END();
12885 }
12886 else
12887 {
12888 /* memory access. */
12889 IEM_MC_BEGIN(0, 1);
12890 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12891 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12892 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12894 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
12895 IEM_MC_ADVANCE_RIP();
12896 IEM_MC_END();
12897 }
12898 return VINF_SUCCESS;
12899}
12900
12901
12902/** Opcode 0xc7. */
12903FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
12904{
12905 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12906     if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
12907 return IEMOP_RAISE_INVALID_OPCODE();
12908 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
12909
12910 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12911 {
12912 /* register access */
12913 switch (pVCpu->iem.s.enmEffOpSize)
12914 {
12915 case IEMMODE_16BIT:
12916 IEM_MC_BEGIN(0, 0);
12917 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12918 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12919 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
12920 IEM_MC_ADVANCE_RIP();
12921 IEM_MC_END();
12922 return VINF_SUCCESS;
12923
12924 case IEMMODE_32BIT:
12925 IEM_MC_BEGIN(0, 0);
12926 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12928 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
12929 IEM_MC_ADVANCE_RIP();
12930 IEM_MC_END();
12931 return VINF_SUCCESS;
12932
12933 case IEMMODE_64BIT:
12934 IEM_MC_BEGIN(0, 0);
12935 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
12936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12937 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
12938 IEM_MC_ADVANCE_RIP();
12939 IEM_MC_END();
12940 return VINF_SUCCESS;
12941
12942 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12943 }
12944 }
12945 else
12946 {
12947 /* memory access. */
12948 switch (pVCpu->iem.s.enmEffOpSize)
12949 {
12950 case IEMMODE_16BIT:
12951 IEM_MC_BEGIN(0, 1);
12952 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12953 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
12954 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12956 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
12957 IEM_MC_ADVANCE_RIP();
12958 IEM_MC_END();
12959 return VINF_SUCCESS;
12960
12961 case IEMMODE_32BIT:
12962 IEM_MC_BEGIN(0, 1);
12963 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12964 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
12965 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12967 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
12968 IEM_MC_ADVANCE_RIP();
12969 IEM_MC_END();
12970 return VINF_SUCCESS;
12971
12972 case IEMMODE_64BIT:
12973 IEM_MC_BEGIN(0, 1);
12974 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12975 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
12976 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
12977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12978 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
12979 IEM_MC_ADVANCE_RIP();
12980 IEM_MC_END();
12981 return VINF_SUCCESS;
12982
12983 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12984 }
12985 }
12986}
12987
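/*
 * Editor's illustrative note on IEM_OPCODE_GET_NEXT_S32_SX_U64 above: in
 * 64-bit operand size, mov Ev,Iz still carries only a 32-bit immediate,
 * which is sign-extended to the destination width.
 */
#include <stdint.h>
static uint64_t movDemoSignExtendImm32(uint32_t u32Imm)
{
    /* E.g. 0x80000000 becomes UINT64_C(0xffffffff80000000). */
    return (uint64_t)(int64_t)(int32_t)u32Imm;
}
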
12988
12989
12990
12991/** Opcode 0xc8. */
12992FNIEMOP_DEF(iemOp_enter_Iw_Ib)
12993{
12994 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
12995 IEMOP_HLP_MIN_186();
12996 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12997 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
12998 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
12999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13000 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
13001}
13002
13003
13004/** Opcode 0xc9. */
13005FNIEMOP_DEF(iemOp_leave)
13006{
13007 IEMOP_MNEMONIC(leave, "leave");
13008 IEMOP_HLP_MIN_186();
13009 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13011 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
13012}
13013
13014
13015/** Opcode 0xca. */
13016FNIEMOP_DEF(iemOp_retf_Iw)
13017{
13018 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
13019 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13021 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13022 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
13023}
13024
13025
13026/** Opcode 0xcb. */
13027FNIEMOP_DEF(iemOp_retf)
13028{
13029 IEMOP_MNEMONIC(retf, "retf");
13030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13031 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13032 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
13033}
13034
13035
13036/** Opcode 0xcc. */
13037FNIEMOP_DEF(iemOp_int_3)
13038{
13039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13040 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
13041}
13042
13043
13044/** Opcode 0xcd. */
13045FNIEMOP_DEF(iemOp_int_Ib)
13046{
13047 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
13048 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13049 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
13050}
13051
13052
13053/** Opcode 0xce. */
13054FNIEMOP_DEF(iemOp_into)
13055{
13056 IEMOP_MNEMONIC(into, "into");
13057 IEMOP_HLP_NO_64BIT();
13058
13059 IEM_MC_BEGIN(2, 0);
13060 IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
13061 IEM_MC_ARG_CONST(bool, fIsBpInstr, /*=*/ false, 1);
13062 IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
13063 IEM_MC_END();
13064 return VINF_SUCCESS;
13065}
13066
13067
13068/** Opcode 0xcf. */
13069FNIEMOP_DEF(iemOp_iret)
13070{
13071 IEMOP_MNEMONIC(iret, "iret");
13072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13073 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
13074}
13075
13076
13077/** Opcode 0xd0. */
13078FNIEMOP_DEF(iemOp_Grp2_Eb_1)
13079{
13080 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13081 PCIEMOPSHIFTSIZES pImpl;
13082 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13083 {
13084 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
13085 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
13086 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
13087 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
13088 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
13089 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
13090 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
13091 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13092 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
13093 }
13094 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13095
13096 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13097 {
13098 /* register */
13099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13100 IEM_MC_BEGIN(3, 0);
13101 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13102 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
13103 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13104 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13105 IEM_MC_REF_EFLAGS(pEFlags);
13106 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13107 IEM_MC_ADVANCE_RIP();
13108 IEM_MC_END();
13109 }
13110 else
13111 {
13112 /* memory */
13113 IEM_MC_BEGIN(3, 2);
13114 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13115 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
13116 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13117 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13118
13119 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13121 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13122 IEM_MC_FETCH_EFLAGS(EFlags);
13123 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13124
13125 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
13126 IEM_MC_COMMIT_EFLAGS(EFlags);
13127 IEM_MC_ADVANCE_RIP();
13128 IEM_MC_END();
13129 }
13130 return VINF_SUCCESS;
13131}
13132
13133
13134
13135/** Opcode 0xd1. */
13136FNIEMOP_DEF(iemOp_Grp2_Ev_1)
13137{
13138 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13139 PCIEMOPSHIFTSIZES pImpl;
13140 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13141 {
13142 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
13143 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
13144 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
13145 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
13146 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
13147 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
13148 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
13149 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13150 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
13151 }
13152 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13153
13154 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13155 {
13156 /* register */
13157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13158 switch (pVCpu->iem.s.enmEffOpSize)
13159 {
13160 case IEMMODE_16BIT:
13161 IEM_MC_BEGIN(3, 0);
13162 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13163 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13164 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13165 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13166 IEM_MC_REF_EFLAGS(pEFlags);
13167 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13168 IEM_MC_ADVANCE_RIP();
13169 IEM_MC_END();
13170 return VINF_SUCCESS;
13171
13172 case IEMMODE_32BIT:
13173 IEM_MC_BEGIN(3, 0);
13174 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13175 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13176 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13177 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13178 IEM_MC_REF_EFLAGS(pEFlags);
13179 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13180 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
13181 IEM_MC_ADVANCE_RIP();
13182 IEM_MC_END();
13183 return VINF_SUCCESS;
13184
13185 case IEMMODE_64BIT:
13186 IEM_MC_BEGIN(3, 0);
13187 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13188 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13189 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13190 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13191 IEM_MC_REF_EFLAGS(pEFlags);
13192 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13193 IEM_MC_ADVANCE_RIP();
13194 IEM_MC_END();
13195 return VINF_SUCCESS;
13196
13197 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13198 }
13199 }
13200 else
13201 {
13202 /* memory */
13203 switch (pVCpu->iem.s.enmEffOpSize)
13204 {
13205 case IEMMODE_16BIT:
13206 IEM_MC_BEGIN(3, 2);
13207 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13208 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13209 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13210 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13211
13212 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13214 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13215 IEM_MC_FETCH_EFLAGS(EFlags);
13216 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13217
13218 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
13219 IEM_MC_COMMIT_EFLAGS(EFlags);
13220 IEM_MC_ADVANCE_RIP();
13221 IEM_MC_END();
13222 return VINF_SUCCESS;
13223
13224 case IEMMODE_32BIT:
13225 IEM_MC_BEGIN(3, 2);
13226 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13227 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13228 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13229 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13230
13231 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13233 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13234 IEM_MC_FETCH_EFLAGS(EFlags);
13235 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13236
13237 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
13238 IEM_MC_COMMIT_EFLAGS(EFlags);
13239 IEM_MC_ADVANCE_RIP();
13240 IEM_MC_END();
13241 return VINF_SUCCESS;
13242
13243 case IEMMODE_64BIT:
13244 IEM_MC_BEGIN(3, 2);
13245 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13246 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13247 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13248 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13249
13250 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13252 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13253 IEM_MC_FETCH_EFLAGS(EFlags);
13254 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13255
13256 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
13257 IEM_MC_COMMIT_EFLAGS(EFlags);
13258 IEM_MC_ADVANCE_RIP();
13259 IEM_MC_END();
13260 return VINF_SUCCESS;
13261
13262 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13263 }
13264 }
13265}
13266
13267
13268/** Opcode 0xd2. */
13269FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
13270{
13271 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13272 PCIEMOPSHIFTSIZES pImpl;
13273 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13274 {
13275 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
13276 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
13277 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
13278 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
13279 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
13280 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
13281 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
13282 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13283 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
13284 }
13285 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13286
13287 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13288 {
13289 /* register */
13290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13291 IEM_MC_BEGIN(3, 0);
13292 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13293 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13294 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13295 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13296 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13297 IEM_MC_REF_EFLAGS(pEFlags);
13298 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13299 IEM_MC_ADVANCE_RIP();
13300 IEM_MC_END();
13301 }
13302 else
13303 {
13304 /* memory */
13305 IEM_MC_BEGIN(3, 2);
13306 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13307 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13308 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13309 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13310
13311 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13313 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13314 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13315 IEM_MC_FETCH_EFLAGS(EFlags);
13316 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13317
13318 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
13319 IEM_MC_COMMIT_EFLAGS(EFlags);
13320 IEM_MC_ADVANCE_RIP();
13321 IEM_MC_END();
13322 }
13323 return VINF_SUCCESS;
13324}
13325
13326
13327/** Opcode 0xd3. */
13328FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
13329{
13330 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13331 PCIEMOPSHIFTSIZES pImpl;
13332 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13333 {
13334 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
13335 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
13336 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
13337 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
13338 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
13339 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
13340 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
13341 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13342 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
13343 }
13344 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13345
13346 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13347 {
13348 /* register */
13349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13350 switch (pVCpu->iem.s.enmEffOpSize)
13351 {
13352 case IEMMODE_16BIT:
13353 IEM_MC_BEGIN(3, 0);
13354 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13355 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13356 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13357 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13358 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13359 IEM_MC_REF_EFLAGS(pEFlags);
13360 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13361 IEM_MC_ADVANCE_RIP();
13362 IEM_MC_END();
13363 return VINF_SUCCESS;
13364
13365 case IEMMODE_32BIT:
13366 IEM_MC_BEGIN(3, 0);
13367 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13368 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13369 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13370 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13371 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13372 IEM_MC_REF_EFLAGS(pEFlags);
13373 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13374 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
13375 IEM_MC_ADVANCE_RIP();
13376 IEM_MC_END();
13377 return VINF_SUCCESS;
13378
13379 case IEMMODE_64BIT:
13380 IEM_MC_BEGIN(3, 0);
13381 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13382 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13383 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13384 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13385 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13386 IEM_MC_REF_EFLAGS(pEFlags);
13387 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13388 IEM_MC_ADVANCE_RIP();
13389 IEM_MC_END();
13390 return VINF_SUCCESS;
13391
13392 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13393 }
13394 }
13395 else
13396 {
13397 /* memory */
13398 switch (pVCpu->iem.s.enmEffOpSize)
13399 {
13400 case IEMMODE_16BIT:
13401 IEM_MC_BEGIN(3, 2);
13402 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13403 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13404 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13405 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13406
13407 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13408 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13409 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13410 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13411 IEM_MC_FETCH_EFLAGS(EFlags);
13412 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13413
13414 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
13415 IEM_MC_COMMIT_EFLAGS(EFlags);
13416 IEM_MC_ADVANCE_RIP();
13417 IEM_MC_END();
13418 return VINF_SUCCESS;
13419
13420 case IEMMODE_32BIT:
13421 IEM_MC_BEGIN(3, 2);
13422 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13423 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13424 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13425 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13426
13427 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13429 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13430 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13431 IEM_MC_FETCH_EFLAGS(EFlags);
13432 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13433
13434 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
13435 IEM_MC_COMMIT_EFLAGS(EFlags);
13436 IEM_MC_ADVANCE_RIP();
13437 IEM_MC_END();
13438 return VINF_SUCCESS;
13439
13440 case IEMMODE_64BIT:
13441 IEM_MC_BEGIN(3, 2);
13442 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13443 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13444 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13445 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13446
13447 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13449 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13450 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13451 IEM_MC_FETCH_EFLAGS(EFlags);
13452 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13453
13454 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
13455 IEM_MC_COMMIT_EFLAGS(EFlags);
13456 IEM_MC_ADVANCE_RIP();
13457 IEM_MC_END();
13458 return VINF_SUCCESS;
13459
13460 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13461 }
13462 }
13463}
13464
13465/** Opcode 0xd4. */
13466FNIEMOP_DEF(iemOp_aam_Ib)
13467{
13468 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
13469 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
13470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13471 IEMOP_HLP_NO_64BIT();
13472 if (!bImm)
13473 return IEMOP_RAISE_DIVIDE_ERROR();
13474 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
13475}
13476
13477
13478/** Opcode 0xd5. */
13479FNIEMOP_DEF(iemOp_aad_Ib)
13480{
13481 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
13482 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
13483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13484 IEMOP_HLP_NO_64BIT();
13485 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
13486}
13487
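/*
 * Editor's illustrative sketch of the ASCII-adjust arithmetic deferred to
 * iemCImpl_aam/iemCImpl_aad above (flag updates omitted): AAM splits AL by
 * the immediate base and AAD recombines.  With the default base 10, AL=0x35
 * (53) yields AH=5, AL=3 after AAM, and AAD reverses that.  The zero check
 * mirrors the IEMOP_RAISE_DIVIDE_ERROR() path in the AAM handler.
 */
#include <stdint.h>
static void aamDemo(uint8_t *pbAl, uint8_t *pbAh, uint8_t bBase)
{
    uint8_t const bOldAl = *pbAl;   /* bBase == 0 raises #DE in the real instruction */
    *pbAh = bOldAl / bBase;
    *pbAl = bOldAl % bBase;
}
static void aadDemo(uint8_t *pbAl, uint8_t *pbAh, uint8_t bBase)
{
    *pbAl = (uint8_t)(*pbAl + *pbAh * bBase);
    *pbAh = 0;
}
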
13488
13489/** Opcode 0xd6. */
13490FNIEMOP_DEF(iemOp_salc)
13491{
13492 IEMOP_MNEMONIC(salc, "salc");
13493     IEMOP_HLP_MIN_286(); /* (undocumented at the time) */
13495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13496 IEMOP_HLP_NO_64BIT();
13497
13498 IEM_MC_BEGIN(0, 0);
13499 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
13500 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
13501 } IEM_MC_ELSE() {
13502 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
13503 } IEM_MC_ENDIF();
13504 IEM_MC_ADVANCE_RIP();
13505 IEM_MC_END();
13506 return VINF_SUCCESS;
13507}
13508
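/* Editor's note: the block above is the entirety of the undocumented SALC
   instruction, equivalent to AL = CF ? 0xff : 0x00 with no flags changed,
   hence the lack of any operand fetching. */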
13509
13510/** Opcode 0xd7. */
13511FNIEMOP_DEF(iemOp_xlat)
13512{
13513 IEMOP_MNEMONIC(xlat, "xlat");
13514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13515 switch (pVCpu->iem.s.enmEffAddrMode)
13516 {
13517 case IEMMODE_16BIT:
13518             IEM_MC_BEGIN(0, 2);
13519 IEM_MC_LOCAL(uint8_t, u8Tmp);
13520 IEM_MC_LOCAL(uint16_t, u16Addr);
13521 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
13522 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
13523 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
13524 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
13525 IEM_MC_ADVANCE_RIP();
13526 IEM_MC_END();
13527 return VINF_SUCCESS;
13528
13529 case IEMMODE_32BIT:
13530             IEM_MC_BEGIN(0, 2);
13531 IEM_MC_LOCAL(uint8_t, u8Tmp);
13532 IEM_MC_LOCAL(uint32_t, u32Addr);
13533 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
13534 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
13535 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
13536 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
13537 IEM_MC_ADVANCE_RIP();
13538 IEM_MC_END();
13539 return VINF_SUCCESS;
13540
13541 case IEMMODE_64BIT:
13542             IEM_MC_BEGIN(0, 2);
13543 IEM_MC_LOCAL(uint8_t, u8Tmp);
13544 IEM_MC_LOCAL(uint64_t, u64Addr);
13545 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
13546 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
13547 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
13548 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
13549 IEM_MC_ADVANCE_RIP();
13550 IEM_MC_END();
13551 return VINF_SUCCESS;
13552
13553 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13554 }
13555}
13556
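/*
 * Editor's illustrative sketch collapsing the three XLAT cases above into
 * one expression: AL is replaced by the byte at seg:(rBX + AL), with the
 * sum truncated to the effective address width.  Names are hypothetical.
 */
#include <stdint.h>
static uint8_t xlatDemo(uint8_t const *pbSegBase, uint64_t uRbx, uint8_t bAl, uint64_t fAddrMask)
{
    /* fAddrMask is 0xffff, 0xffffffff or UINT64_MAX for the three cases. */
    return pbSegBase[(uRbx + bAl) & fAddrMask];
}
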
13557
13558/**
13559 * Common worker for FPU instructions working on ST0 and STn, and storing the
13560 * result in ST0.
13561 *
13562 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13563 */
13564FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
13565{
13566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13567
13568 IEM_MC_BEGIN(3, 1);
13569 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13570 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13571 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13572 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13573
13574 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13575 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13576 IEM_MC_PREPARE_FPU_USAGE();
13577 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13578 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
13579 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13580 IEM_MC_ELSE()
13581 IEM_MC_FPU_STACK_UNDERFLOW(0);
13582 IEM_MC_ENDIF();
13583 IEM_MC_ADVANCE_RIP();
13584
13585 IEM_MC_END();
13586 return VINF_SUCCESS;
13587}
13588
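/* Editor's note on the worker above: for the register forms of the 0xd8
   escape the low three ModRM bits select ST(i) as the second operand while
   ST(0) is implicit.  E.g. the two-byte sequence d8 c1 has mod=3, reg=0
   (fadd) and rm=1, i.e. "fadd st0, st1". */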
13589
13590/**
13591 * Common worker for FPU instructions working on ST0 and STn, and only affecting
13592 * flags.
13593 *
13594 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13595 */
13596FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
13597{
13598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13599
13600 IEM_MC_BEGIN(3, 1);
13601 IEM_MC_LOCAL(uint16_t, u16Fsw);
13602 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13603 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13604 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13605
13606 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13607 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13608 IEM_MC_PREPARE_FPU_USAGE();
13609 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13610 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
13611 IEM_MC_UPDATE_FSW(u16Fsw);
13612 IEM_MC_ELSE()
13613 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
13614 IEM_MC_ENDIF();
13615 IEM_MC_ADVANCE_RIP();
13616
13617 IEM_MC_END();
13618 return VINF_SUCCESS;
13619}
13620
13621
13622/**
13623 * Common worker for FPU instructions working on ST0 and STn, only affecting
13624 * flags, and popping when done.
13625 *
13626 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13627 */
13628FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
13629{
13630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13631
13632 IEM_MC_BEGIN(3, 1);
13633 IEM_MC_LOCAL(uint16_t, u16Fsw);
13634 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13635 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13636 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13637
13638 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13639 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13640 IEM_MC_PREPARE_FPU_USAGE();
13641 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13642 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
13643 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
13644 IEM_MC_ELSE()
13645 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
13646 IEM_MC_ENDIF();
13647 IEM_MC_ADVANCE_RIP();
13648
13649 IEM_MC_END();
13650 return VINF_SUCCESS;
13651}
13652
13653
13654/** Opcode 0xd8 11/0. */
13655FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
13656{
13657 IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
13658 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
13659}
13660
13661
13662/** Opcode 0xd8 11/1. */
13663FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
13664{
13665 IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
13666 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
13667}
13668
13669
13670/** Opcode 0xd8 11/2. */
13671FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
13672{
13673 IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
13674 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
13675}
13676
13677
13678/** Opcode 0xd8 11/3. */
13679FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
13680{
13681 IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
13682 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
13683}
13684
13685
13686/** Opcode 0xd8 11/4. */
13687FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
13688{
13689 IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
13690 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
13691}
13692
13693
13694/** Opcode 0xd8 11/5. */
13695FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
13696{
13697 IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
13698 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
13699}
13700
13701
13702/** Opcode 0xd8 11/6. */
13703FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
13704{
13705 IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
13706 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
13707}
13708
13709
13710/** Opcode 0xd8 11/7. */
13711FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
13712{
13713 IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
13714 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
13715}
13716
13717
13718/**
13719 * Common worker for FPU instructions working on ST0 and an m32r, and storing
13720 * the result in ST0.
13721 *
13722 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13723 */
13724FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
13725{
13726 IEM_MC_BEGIN(3, 3);
13727 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13728 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13729 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
13730 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13731 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13732 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
13733
13734 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13736
13737 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13738 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13739 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13740
13741 IEM_MC_PREPARE_FPU_USAGE();
13742 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
13743 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
13744 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13745 IEM_MC_ELSE()
13746 IEM_MC_FPU_STACK_UNDERFLOW(0);
13747 IEM_MC_ENDIF();
13748 IEM_MC_ADVANCE_RIP();
13749
13750 IEM_MC_END();
13751 return VINF_SUCCESS;
13752}
13753
13754
13755/** Opcode 0xd8 !11/0. */
13756FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
13757{
13758 IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
13759 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
13760}
13761
13762
13763/** Opcode 0xd8 !11/1. */
13764FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
13765{
13766 IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
13767 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
13768}
13769
13770
13771/** Opcode 0xd8 !11/2. */
13772FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
13773{
13774 IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");
13775
13776 IEM_MC_BEGIN(3, 3);
13777 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13778 IEM_MC_LOCAL(uint16_t, u16Fsw);
13779 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
13780 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13781 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13782 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
13783
13784 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13786
13787 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13788 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13789 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13790
13791 IEM_MC_PREPARE_FPU_USAGE();
13792 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
13793 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
13794 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13795 IEM_MC_ELSE()
13796 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13797 IEM_MC_ENDIF();
13798 IEM_MC_ADVANCE_RIP();
13799
13800 IEM_MC_END();
13801 return VINF_SUCCESS;
13802}
13803
13804
13805/** Opcode 0xd8 !11/3. */
13806FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
13807{
13808 IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");
13809
13810 IEM_MC_BEGIN(3, 3);
13811 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13812 IEM_MC_LOCAL(uint16_t, u16Fsw);
13813 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
13814 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13815 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13816 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
13817
13818 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13820
13821 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13822 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13823 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13824
13825 IEM_MC_PREPARE_FPU_USAGE();
13826 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
13827 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
13828 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13829 IEM_MC_ELSE()
13830 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13831 IEM_MC_ENDIF();
13832 IEM_MC_ADVANCE_RIP();
13833
13834 IEM_MC_END();
13835 return VINF_SUCCESS;
13836}
13837
13838
13839/** Opcode 0xd8 !11/4. */
13840FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
13841{
13842 IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
13843 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
13844}
13845
13846
13847/** Opcode 0xd8 !11/5. */
13848FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
13849{
13850 IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
13851 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
13852}
13853
13854
13855/** Opcode 0xd8 !11/6. */
13856FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
13857{
13858 IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
13859 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
13860}
13861
13862
13863/** Opcode 0xd8 !11/7. */
13864FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
13865{
13866 IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
13867 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
13868}
13869
13870
13871/** Opcode 0xd8. */
13872FNIEMOP_DEF(iemOp_EscF0)
13873{
13874 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
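    /* Record the 11-bit x87 opcode (low three bits of the escape byte in the
       high byte, ModR/M in the low byte) for the FOP field reported by
       FNSTENV/FNSAVE. */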
13875 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
13876
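    /* mod == 3 selects the register forms (e.g. 0xd8 0xc1 decodes as
       FADD ST0,ST1), everything else the m32r memory forms. */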
13877 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13878 {
13879 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13880 {
13881 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
13882 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
13883 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
13884 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
13885 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
13886 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
13887 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
13888 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
13889 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13890 }
13891 }
13892 else
13893 {
13894 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13895 {
13896 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
13897 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
13898 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
13899 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
13900 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
13901 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
13902 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
13903 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
13904 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13905 }
13906 }
13907}
13908
13909
13910/** Opcode 0xd9 !11/0 mem32real
13911 * @sa iemOp_fld_m64r */
13912FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
13913{
13914 IEMOP_MNEMONIC(fld_m32r, "fld m32r");
13915
13916 IEM_MC_BEGIN(2, 3);
13917 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13918 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13919 IEM_MC_LOCAL(RTFLOAT32U, r32Val);
13920 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13921 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);
13922
13923 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13925
13926 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13927 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13928 IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13929
13930 IEM_MC_PREPARE_FPU_USAGE();
13931 IEM_MC_IF_FPUREG_IS_EMPTY(7)
13932 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
13933 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13934 IEM_MC_ELSE()
13935 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13936 IEM_MC_ENDIF();
13937 IEM_MC_ADVANCE_RIP();
13938
13939 IEM_MC_END();
13940 return VINF_SUCCESS;
13941}
13942
13943
13944/** Opcode 0xd9 !11/2 mem32real */
13945FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
13946{
13947 IEMOP_MNEMONIC(fst_m32r, "fst m32r");
13948 IEM_MC_BEGIN(3, 2);
13949 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13950 IEM_MC_LOCAL(uint16_t, u16Fsw);
13951 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13952 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
13953 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
13954
13955 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13957 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13958 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13959
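    /* The destination is mapped before any FPU work so access faults are raised
       up front; the _FOR_FPU_STORE unmap variant only commits the bytes when the
       FSW returned by the assembly helper shows no unmasked exception. */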
13960 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
13961 IEM_MC_PREPARE_FPU_USAGE();
13962 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13963 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
13964 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
13965 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13966 IEM_MC_ELSE()
13967 IEM_MC_IF_FCW_IM()
13968 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
13969 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
13970 IEM_MC_ENDIF();
13971 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13972 IEM_MC_ENDIF();
13973 IEM_MC_ADVANCE_RIP();
13974
13975 IEM_MC_END();
13976 return VINF_SUCCESS;
13977}
13978
13979
13980/** Opcode 0xd9 !11/3 */
13981FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
13982{
13983 IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
13984 IEM_MC_BEGIN(3, 2);
13985 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13986 IEM_MC_LOCAL(uint16_t, u16Fsw);
13987 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13988 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
13989 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
13990
13991 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13993 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13994 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13995
13996 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
13997 IEM_MC_PREPARE_FPU_USAGE();
13998 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13999 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
14000 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14001 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14002 IEM_MC_ELSE()
14003 IEM_MC_IF_FCW_IM()
14004 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
14005 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
14006 IEM_MC_ENDIF();
14007 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14008 IEM_MC_ENDIF();
14009 IEM_MC_ADVANCE_RIP();
14010
14011 IEM_MC_END();
14012 return VINF_SUCCESS;
14013}
14014
14015
14016/** Opcode 0xd9 !11/4 */
14017FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
14018{
14019 IEMOP_MNEMONIC(fldenv, "fldenv m14/m28byte");
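    /* The environment image is 14 bytes with a 16-bit operand size and 28 bytes
       otherwise, which is why the effective operand size is passed along. */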
14020 IEM_MC_BEGIN(3, 0);
14021 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
14022 IEM_MC_ARG(uint8_t, iEffSeg, 1);
14023 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
14024 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14026 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14027 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14028 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
14029 IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
14030 IEM_MC_END();
14031 return VINF_SUCCESS;
14032}
14033
14034
14035/** Opcode 0xd9 !11/5 */
14036FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
14037{
14038 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
14039 IEM_MC_BEGIN(1, 1);
14040 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14041 IEM_MC_ARG(uint16_t, u16Fcw, 0);
14042 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14044 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14045 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14046 IEM_MC_FETCH_MEM_U16(u16Fcw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14047 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fcw);
14048 IEM_MC_END();
14049 return VINF_SUCCESS;
14050}
14051
14052
14053/** Opcode 0xd9 !11/6 */
14054FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
14055{
14056 IEMOP_MNEMONIC(fnstenv, "fnstenv m14/m28byte");
14057 IEM_MC_BEGIN(3, 0);
14058 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
14059 IEM_MC_ARG(uint8_t, iEffSeg, 1);
14060 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
14061 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14063 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14064 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
14065 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
14066 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
14067 IEM_MC_END();
14068 return VINF_SUCCESS;
14069}
14070
14071
14072/** Opcode 0xd9 !11/7 */
14073FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
14074{
14075 IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
14076 IEM_MC_BEGIN(2, 0);
14077 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14078 IEM_MC_LOCAL(uint16_t, u16Fcw);
14079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14081 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14082 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
14083 IEM_MC_FETCH_FCW(u16Fcw);
14084 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
14085 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
14086 IEM_MC_END();
14087 return VINF_SUCCESS;
14088}
14089
14090
14091/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?. */
14092FNIEMOP_DEF(iemOp_fnop)
14093{
14094 IEMOP_MNEMONIC(fnop, "fnop");
14095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14096
14097 IEM_MC_BEGIN(0, 0);
14098 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14099 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14100 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14101 /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
14102 * intel optimizations. Investigate. */
14103 IEM_MC_UPDATE_FPU_OPCODE_IP();
14104 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
14105 IEM_MC_END();
14106 return VINF_SUCCESS;
14107}
14108
14109
14110/** Opcode 0xd9 11/0 stN */
14111FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
14112{
14113 IEMOP_MNEMONIC(fld_stN, "fld stN");
14114 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14115
14116 /** @todo Testcase: Check if this raises \#MF? Intel's docs indicate that it
14117 * doesn't, while AMD's indicate that it does. */
14118 IEM_MC_BEGIN(0, 2);
14119 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
14120 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14121 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14122 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14123
14124 IEM_MC_PREPARE_FPU_USAGE();
14125 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
14126 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
14127 IEM_MC_PUSH_FPU_RESULT(FpuRes);
14128 IEM_MC_ELSE()
14129 IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
14130 IEM_MC_ENDIF();
14131
14132 IEM_MC_ADVANCE_RIP();
14133 IEM_MC_END();
14134
14135 return VINF_SUCCESS;
14136}
14137
14138
14139/** Opcode 0xd9 11/3 stN */
14140FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
14141{
14142 IEMOP_MNEMONIC(fxch_stN, "fxch stN");
14143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14144
14145 /** @todo Testcase: Check if this raises \#MF? Intel's docs indicate that it
14146 * doesn't, while AMD's indicate that it does. */
14147 IEM_MC_BEGIN(1, 3);
14148 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
14149 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
14150 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14151 IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
14152 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14153 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14154
14155 IEM_MC_PREPARE_FPU_USAGE();
14156 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14157 IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
14158 IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
14159 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14160 IEM_MC_ELSE()
14161 IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
14162 IEM_MC_ENDIF();
14163
14164 IEM_MC_ADVANCE_RIP();
14165 IEM_MC_END();
14166
14167 return VINF_SUCCESS;
14168}
14169
14170
14171/** Opcode 0xd9 11/4, 0xdd 11/2. */
14172FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
14173{
14174 IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
14175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14176
14177 /* fstp st0, st0 is frequently used as a documented stand-in for the undocumented 'ffreep st0'. */
14178 uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
14179 if (!iDstReg)
14180 {
14181 IEM_MC_BEGIN(0, 1);
14182 IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
14183 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14184 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14185
14186 IEM_MC_PREPARE_FPU_USAGE();
14187 IEM_MC_IF_FPUREG_NOT_EMPTY(0)
14188 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
14189 IEM_MC_ELSE()
14190 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
14191 IEM_MC_ENDIF();
14192
14193 IEM_MC_ADVANCE_RIP();
14194 IEM_MC_END();
14195 }
14196 else
14197 {
14198 IEM_MC_BEGIN(0, 2);
14199 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
14200 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14201 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14202 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14203
14204 IEM_MC_PREPARE_FPU_USAGE();
14205 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14206 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
14207 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
14208 IEM_MC_ELSE()
14209 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
14210 IEM_MC_ENDIF();
14211
14212 IEM_MC_ADVANCE_RIP();
14213 IEM_MC_END();
14214 }
14215 return VINF_SUCCESS;
14216}
14217
14218
14219/**
14220 * Common worker for FPU instructions working on ST0, replacing it with the
14221 * result, i.e. unary operators.
14222 *
14223 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14224 */
14225FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
14226{
14227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14228
14229 IEM_MC_BEGIN(2, 1);
14230 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14231 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14232 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14233
14234 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14235 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14236 IEM_MC_PREPARE_FPU_USAGE();
14237 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14238 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
14239 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14240 IEM_MC_ELSE()
14241 IEM_MC_FPU_STACK_UNDERFLOW(0);
14242 IEM_MC_ENDIF();
14243 IEM_MC_ADVANCE_RIP();
14244
14245 IEM_MC_END();
14246 return VINF_SUCCESS;
14247}
14248
14249
14250/** Opcode 0xd9 0xe0. */
14251FNIEMOP_DEF(iemOp_fchs)
14252{
14253 IEMOP_MNEMONIC(fchs_st0, "fchs st0");
14254 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
14255}
14256
14257
14258/** Opcode 0xd9 0xe1. */
14259FNIEMOP_DEF(iemOp_fabs)
14260{
14261 IEMOP_MNEMONIC(fabs_st0, "fabs st0");
14262 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
14263}
14264
14265
14266/**
14267 * Common worker for FPU instructions working on ST0 and only returning the FSW.
14268 *
14269 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14270 */
14271FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
14272{
14273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14274
14275 IEM_MC_BEGIN(2, 1);
14276 IEM_MC_LOCAL(uint16_t, u16Fsw);
14277 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14278 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14279
14280 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14281 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14282 IEM_MC_PREPARE_FPU_USAGE();
14283 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14284 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
14285 IEM_MC_UPDATE_FSW(u16Fsw);
14286 IEM_MC_ELSE()
14287 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
14288 IEM_MC_ENDIF();
14289 IEM_MC_ADVANCE_RIP();
14290
14291 IEM_MC_END();
14292 return VINF_SUCCESS;
14293}
14294
14295
14296/** Opcode 0xd9 0xe4. */
14297FNIEMOP_DEF(iemOp_ftst)
14298{
14299 IEMOP_MNEMONIC(ftst_st0, "ftst st0");
14300 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
14301}
14302
14303
14304/** Opcode 0xd9 0xe5. */
14305FNIEMOP_DEF(iemOp_fxam)
14306{
14307 IEMOP_MNEMONIC(fxam_st0, "fxam st0");
14308 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
14309}
14310
14311
14312/**
14313 * Common worker for FPU instructions pushing a constant onto the FPU stack.
14314 *
14315 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14316 */
14317FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
14318{
14319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14320
14321 IEM_MC_BEGIN(1, 1);
14322 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14323 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14324
14325 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14326 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14327 IEM_MC_PREPARE_FPU_USAGE();
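    /* The push only succeeds if the register that will become the new TOP
       (currently ST7) is empty; otherwise it is an FPU stack overflow. */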
14328 IEM_MC_IF_FPUREG_IS_EMPTY(7)
14329 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
14330 IEM_MC_PUSH_FPU_RESULT(FpuRes);
14331 IEM_MC_ELSE()
14332 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
14333 IEM_MC_ENDIF();
14334 IEM_MC_ADVANCE_RIP();
14335
14336 IEM_MC_END();
14337 return VINF_SUCCESS;
14338}
14339
14340
14341/** Opcode 0xd9 0xe8. */
14342FNIEMOP_DEF(iemOp_fld1)
14343{
14344 IEMOP_MNEMONIC(fld1, "fld1");
14345 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
14346}
14347
14348
14349/** Opcode 0xd9 0xe9. */
14350FNIEMOP_DEF(iemOp_fldl2t)
14351{
14352 IEMOP_MNEMONIC(fldl2t, "fldl2t");
14353 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
14354}
14355
14356
14357/** Opcode 0xd9 0xea. */
14358FNIEMOP_DEF(iemOp_fldl2e)
14359{
14360 IEMOP_MNEMONIC(fldl2e, "fldl2e");
14361 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
14362}
14363
14364/** Opcode 0xd9 0xeb. */
14365FNIEMOP_DEF(iemOp_fldpi)
14366{
14367 IEMOP_MNEMONIC(fldpi, "fldpi");
14368 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
14369}
14370
14371
14372/** Opcode 0xd9 0xec. */
14373FNIEMOP_DEF(iemOp_fldlg2)
14374{
14375 IEMOP_MNEMONIC(fldlg2, "fldlg2");
14376 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
14377}
14378
14379/** Opcode 0xd9 0xed. */
14380FNIEMOP_DEF(iemOp_fldln2)
14381{
14382 IEMOP_MNEMONIC(fldln2, "fldln2");
14383 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
14384}
14385
14386
14387/** Opcode 0xd9 0xee. */
14388FNIEMOP_DEF(iemOp_fldz)
14389{
14390 IEMOP_MNEMONIC(fldz, "fldz");
14391 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
14392}
14393
14394
14395/** Opcode 0xd9 0xf0. */
14396FNIEMOP_DEF(iemOp_f2xm1)
14397{
14398 IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
14399 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
14400}
14401
14402
14403/**
14404 * Common worker for FPU instructions working on STn and ST0, storing the result
14405 * in STn, and popping the stack unless IE, DE or ZE was raised.
14406 *
14407 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14408 */
14409FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
14410{
14411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14412
14413 IEM_MC_BEGIN(3, 1);
14414 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14415 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14416 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14417 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14418
14419 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14420 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14421
14422 IEM_MC_PREPARE_FPU_USAGE();
14423 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
14424 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
14425 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
14426 IEM_MC_ELSE()
14427 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
14428 IEM_MC_ENDIF();
14429 IEM_MC_ADVANCE_RIP();
14430
14431 IEM_MC_END();
14432 return VINF_SUCCESS;
14433}
14434
14435
14436/** Opcode 0xd9 0xf1. */
14437FNIEMOP_DEF(iemOp_fyl2x)
14438{
14439 IEMOP_MNEMONIC(fyl2x_st1_st0, "fyl2x st1,st0");
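    /* Reuses the stN,st0 worker with a hard-coded ModR/M r/m value of 1, i.e.
       the result lands in ST1 and the stack is popped. */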
14440 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
14441}
14442
14443
14444/**
14445 * Common worker for FPU instructions working on ST0 and having two outputs, one
14446 * replacing ST0 and one pushed onto the stack.
14447 *
14448 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14449 */
14450FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
14451{
14452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14453
14454 IEM_MC_BEGIN(2, 1);
14455 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
14456 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
14457 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14458
14459 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14460 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14461 IEM_MC_PREPARE_FPU_USAGE();
14462 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14463 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
14464 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
14465 IEM_MC_ELSE()
14466 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
14467 IEM_MC_ENDIF();
14468 IEM_MC_ADVANCE_RIP();
14469
14470 IEM_MC_END();
14471 return VINF_SUCCESS;
14472}
14473
14474
14475/** Opcode 0xd9 0xf2. */
14476FNIEMOP_DEF(iemOp_fptan)
14477{
14478 IEMOP_MNEMONIC(fptan_st0, "fptan st0");
14479 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
14480}
14481
14482
14483/** Opcode 0xd9 0xf3. */
14484FNIEMOP_DEF(iemOp_fpatan)
14485{
14486 IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
14487 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
14488}
14489
14490
14491/** Opcode 0xd9 0xf4. */
14492FNIEMOP_DEF(iemOp_fxtract)
14493{
14494 IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
14495 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
14496}
14497
14498
14499/** Opcode 0xd9 0xf5. */
14500FNIEMOP_DEF(iemOp_fprem1)
14501{
14502 IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
14503 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
14504}
14505
14506
14507/** Opcode 0xd9 0xf6. */
14508FNIEMOP_DEF(iemOp_fdecstp)
14509{
14510 IEMOP_MNEMONIC(fdecstp, "fdecstp");
14511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14512 /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
14513 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
14514 * FINCSTP and FDECSTP. */
14515
14516 IEM_MC_BEGIN(0,0);
14517
14518 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14519 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14520
14521 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14522 IEM_MC_FPU_STACK_DEC_TOP();
14523 IEM_MC_UPDATE_FSW_CONST(0);
14524
14525 IEM_MC_ADVANCE_RIP();
14526 IEM_MC_END();
14527 return VINF_SUCCESS;
14528}
14529
14530
14531/** Opcode 0xd9 0xf7. */
14532FNIEMOP_DEF(iemOp_fincstp)
14533{
14534 IEMOP_MNEMONIC(fincstp, "fincstp");
14535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14536 /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
14537 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
14538 * FINCSTP and FDECSTP. */
14539
14540 IEM_MC_BEGIN(0,0);
14541
14542 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14543 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14544
14545 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14546 IEM_MC_FPU_STACK_INC_TOP();
14547 IEM_MC_UPDATE_FSW_CONST(0);
14548
14549 IEM_MC_ADVANCE_RIP();
14550 IEM_MC_END();
14551 return VINF_SUCCESS;
14552}
14553
14554
14555/** Opcode 0xd9 0xf8. */
14556FNIEMOP_DEF(iemOp_fprem)
14557{
14558 IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
14559 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
14560}
14561
14562
14563/** Opcode 0xd9 0xf9. */
14564FNIEMOP_DEF(iemOp_fyl2xp1)
14565{
14566 IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
14567 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
14568}
14569
14570
14571/** Opcode 0xd9 0xfa. */
14572FNIEMOP_DEF(iemOp_fsqrt)
14573{
14574 IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
14575 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
14576}
14577
14578
14579/** Opcode 0xd9 0xfb. */
14580FNIEMOP_DEF(iemOp_fsincos)
14581{
14582 IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
14583 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
14584}
14585
14586
14587/** Opcode 0xd9 0xfc. */
14588FNIEMOP_DEF(iemOp_frndint)
14589{
14590 IEMOP_MNEMONIC(frndint_st0, "frndint st0");
14591 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
14592}
14593
14594
14595/** Opcode 0xd9 0xfd. */
14596FNIEMOP_DEF(iemOp_fscale)
14597{
14598 IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
14599 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
14600}
14601
14602
14603/** Opcode 0xd9 0xfe. */
14604FNIEMOP_DEF(iemOp_fsin)
14605{
14606 IEMOP_MNEMONIC(fsin_st0, "fsin st0");
14607 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
14608}
14609
14610
14611/** Opcode 0xd9 0xff. */
14612FNIEMOP_DEF(iemOp_fcos)
14613{
14614 IEMOP_MNEMONIC(fcos_st0, "fcos st0");
14615 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
14616}
14617
14618
14619/** Used by iemOp_EscF1. */
14620IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
14621{
14622 /* 0xe0 */ iemOp_fchs,
14623 /* 0xe1 */ iemOp_fabs,
14624 /* 0xe2 */ iemOp_Invalid,
14625 /* 0xe3 */ iemOp_Invalid,
14626 /* 0xe4 */ iemOp_ftst,
14627 /* 0xe5 */ iemOp_fxam,
14628 /* 0xe6 */ iemOp_Invalid,
14629 /* 0xe7 */ iemOp_Invalid,
14630 /* 0xe8 */ iemOp_fld1,
14631 /* 0xe9 */ iemOp_fldl2t,
14632 /* 0xea */ iemOp_fldl2e,
14633 /* 0xeb */ iemOp_fldpi,
14634 /* 0xec */ iemOp_fldlg2,
14635 /* 0xed */ iemOp_fldln2,
14636 /* 0xee */ iemOp_fldz,
14637 /* 0xef */ iemOp_Invalid,
14638 /* 0xf0 */ iemOp_f2xm1,
14639 /* 0xf1 */ iemOp_fyl2x,
14640 /* 0xf2 */ iemOp_fptan,
14641 /* 0xf3 */ iemOp_fpatan,
14642 /* 0xf4 */ iemOp_fxtract,
14643 /* 0xf5 */ iemOp_fprem1,
14644 /* 0xf6 */ iemOp_fdecstp,
14645 /* 0xf7 */ iemOp_fincstp,
14646 /* 0xf8 */ iemOp_fprem,
14647 /* 0xf9 */ iemOp_fyl2xp1,
14648 /* 0xfa */ iemOp_fsqrt,
14649 /* 0xfb */ iemOp_fsincos,
14650 /* 0xfc */ iemOp_frndint,
14651 /* 0xfd */ iemOp_fscale,
14652 /* 0xfe */ iemOp_fsin,
14653 /* 0xff */ iemOp_fcos
14654};
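/* Lookup sketch: a register-form byte in the 0xe0..0xff range is dispatched as
   g_apfnEscF1_E0toFF[bRm - 0xe0], so e.g. 0xd9 0xfe (FSIN) hits entry 0x1e. */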
14655
14656
14657/** Opcode 0xd9. */
14658FNIEMOP_DEF(iemOp_EscF1)
14659{
14660 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14661 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
14662
14663 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14664 {
14665 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14666 {
14667 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
14668 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
14669 case 2:
14670 if (bRm == 0xd0)
14671 return FNIEMOP_CALL(iemOp_fnop);
14672 return IEMOP_RAISE_INVALID_OPCODE();
14673 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
14674 case 4:
14675 case 5:
14676 case 6:
14677 case 7:
14678 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
14679 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
14680 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14681 }
14682 }
14683 else
14684 {
14685 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14686 {
14687 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
14688 case 1: return IEMOP_RAISE_INVALID_OPCODE();
14689 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
14690 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
14691 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
14692 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
14693 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
14694 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
14695 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14696 }
14697 }
14698}
14699
14700
14701/** Opcode 0xda 11/0. */
14702FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
14703{
14704 IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
14705 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14706
14707 IEM_MC_BEGIN(0, 1);
14708 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14709
14710 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14711 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14712
14713 IEM_MC_PREPARE_FPU_USAGE();
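    /* FCMOVcc is unusual for x87 code in that it tests the integer EFLAGS (CF
       here, the 'below' condition) and, when set, copies ST(i) over ST0;
       FPUIP/FOP are updated either way. */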
14714 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14715 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
14716 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14717 IEM_MC_ENDIF();
14718 IEM_MC_UPDATE_FPU_OPCODE_IP();
14719 IEM_MC_ELSE()
14720 IEM_MC_FPU_STACK_UNDERFLOW(0);
14721 IEM_MC_ENDIF();
14722 IEM_MC_ADVANCE_RIP();
14723
14724 IEM_MC_END();
14725 return VINF_SUCCESS;
14726}
14727
14728
14729/** Opcode 0xda 11/1. */
14730FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
14731{
14732 IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
14733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14734
14735 IEM_MC_BEGIN(0, 1);
14736 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14737
14738 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14739 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14740
14741 IEM_MC_PREPARE_FPU_USAGE();
14742 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14743 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
14744 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14745 IEM_MC_ENDIF();
14746 IEM_MC_UPDATE_FPU_OPCODE_IP();
14747 IEM_MC_ELSE()
14748 IEM_MC_FPU_STACK_UNDERFLOW(0);
14749 IEM_MC_ENDIF();
14750 IEM_MC_ADVANCE_RIP();
14751
14752 IEM_MC_END();
14753 return VINF_SUCCESS;
14754}
14755
14756
14757/** Opcode 0xda 11/2. */
14758FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
14759{
14760 IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
14761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14762
14763 IEM_MC_BEGIN(0, 1);
14764 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14765
14766 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14767 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14768
14769 IEM_MC_PREPARE_FPU_USAGE();
14770 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14771 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
14772 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14773 IEM_MC_ENDIF();
14774 IEM_MC_UPDATE_FPU_OPCODE_IP();
14775 IEM_MC_ELSE()
14776 IEM_MC_FPU_STACK_UNDERFLOW(0);
14777 IEM_MC_ENDIF();
14778 IEM_MC_ADVANCE_RIP();
14779
14780 IEM_MC_END();
14781 return VINF_SUCCESS;
14782}
14783
14784
14785/** Opcode 0xda 11/3. */
14786FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
14787{
14788 IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
14789 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14790
14791 IEM_MC_BEGIN(0, 1);
14792 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14793
14794 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14795 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14796
14797 IEM_MC_PREPARE_FPU_USAGE();
14798 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14799 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
14800 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14801 IEM_MC_ENDIF();
14802 IEM_MC_UPDATE_FPU_OPCODE_IP();
14803 IEM_MC_ELSE()
14804 IEM_MC_FPU_STACK_UNDERFLOW(0);
14805 IEM_MC_ENDIF();
14806 IEM_MC_ADVANCE_RIP();
14807
14808 IEM_MC_END();
14809 return VINF_SUCCESS;
14810}
14811
14812
14813/**
14814 * Common worker for FPU instructions working on ST0 and ST1, only affecting
14815 * flags, and popping twice when done.
14816 *
14817 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14818 */
14819FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14820{
14821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14822
14823 IEM_MC_BEGIN(3, 1);
14824 IEM_MC_LOCAL(uint16_t, u16Fsw);
14825 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14826 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14827 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14828
14829 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14830 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14831
14832 IEM_MC_PREPARE_FPU_USAGE();
14833 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
14834 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14835 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
14836 IEM_MC_ELSE()
14837 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
14838 IEM_MC_ENDIF();
14839 IEM_MC_ADVANCE_RIP();
14840
14841 IEM_MC_END();
14842 return VINF_SUCCESS;
14843}
14844
14845
14846/** Opcode 0xda 0xe9. */
14847FNIEMOP_DEF(iemOp_fucompp)
14848{
14849 IEMOP_MNEMONIC(fucompp_st0_st1, "fucompp st0,st1");
14850 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
14851}
14852
14853
14854/**
14855 * Common worker for FPU instructions working on ST0 and an m32i, and storing
14856 * the result in ST0.
14857 *
14858 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14859 */
14860FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
14861{
14862 IEM_MC_BEGIN(3, 3);
14863 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14864 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14865 IEM_MC_LOCAL(int32_t, i32Val2);
14866 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14867 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14868 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
14869
14870 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14872
14873 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14874 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14875 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14876
14877 IEM_MC_PREPARE_FPU_USAGE();
14878 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14879 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
14880 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14881 IEM_MC_ELSE()
14882 IEM_MC_FPU_STACK_UNDERFLOW(0);
14883 IEM_MC_ENDIF();
14884 IEM_MC_ADVANCE_RIP();
14885
14886 IEM_MC_END();
14887 return VINF_SUCCESS;
14888}
14889
14890
14891/** Opcode 0xda !11/0. */
14892FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
14893{
14894 IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
14895 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
14896}
14897
14898
14899/** Opcode 0xda !11/1. */
14900FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
14901{
14902 IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
14903 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
14904}
14905
14906
14907/** Opcode 0xda !11/2. */
14908FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
14909{
14910 IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");
14911
14912 IEM_MC_BEGIN(3, 3);
14913 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14914 IEM_MC_LOCAL(uint16_t, u16Fsw);
14915 IEM_MC_LOCAL(int32_t, i32Val2);
14916 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14917 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14918 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
14919
14920 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14922
14923 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14924 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14925 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14926
14927 IEM_MC_PREPARE_FPU_USAGE();
14928 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14929 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
14930 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14931 IEM_MC_ELSE()
14932 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14933 IEM_MC_ENDIF();
14934 IEM_MC_ADVANCE_RIP();
14935
14936 IEM_MC_END();
14937 return VINF_SUCCESS;
14938}
14939
14940
14941/** Opcode 0xda !11/3. */
14942FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
14943{
14944 IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");
14945
14946 IEM_MC_BEGIN(3, 3);
14947 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14948 IEM_MC_LOCAL(uint16_t, u16Fsw);
14949 IEM_MC_LOCAL(int32_t, i32Val2);
14950 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14951 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14952 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
14953
14954 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14956
14957 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14958 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14959 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14960
14961 IEM_MC_PREPARE_FPU_USAGE();
14962 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14963 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
14964 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14965 IEM_MC_ELSE()
14966 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14967 IEM_MC_ENDIF();
14968 IEM_MC_ADVANCE_RIP();
14969
14970 IEM_MC_END();
14971 return VINF_SUCCESS;
14972}
14973
14974
14975/** Opcode 0xda !11/4. */
14976FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
14977{
14978 IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
14979 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
14980}
14981
14982
14983/** Opcode 0xda !11/5. */
14984FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
14985{
14986 IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
14987 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
14988}
14989
14990
14991/** Opcode 0xda !11/6. */
14992FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
14993{
14994 IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
14995 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
14996}
14997
14998
14999/** Opcode 0xda !11/7. */
15000FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
15001{
15002 IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
15003 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
15004}
15005
15006
15007/** Opcode 0xda. */
15008FNIEMOP_DEF(iemOp_EscF2)
15009{
15010 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15011 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
15012 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15013 {
15014 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15015 {
15016 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
15017 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
15018 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
15019 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
15020 case 4: return IEMOP_RAISE_INVALID_OPCODE();
15021 case 5:
15022 if (bRm == 0xe9)
15023 return FNIEMOP_CALL(iemOp_fucompp);
15024 return IEMOP_RAISE_INVALID_OPCODE();
15025 case 6: return IEMOP_RAISE_INVALID_OPCODE();
15026 case 7: return IEMOP_RAISE_INVALID_OPCODE();
15027 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15028 }
15029 }
15030 else
15031 {
15032 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15033 {
15034 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
15035 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
15036 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
15037 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
15038 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
15039 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
15040 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
15041 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
15042 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15043 }
15044 }
15045}
15046
15047
15048/** Opcode 0xdb !11/0. */
15049FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
15050{
15051 IEMOP_MNEMONIC(fild_m32i, "fild m32i");
15052
15053 IEM_MC_BEGIN(2, 3);
15054 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15055 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15056 IEM_MC_LOCAL(int32_t, i32Val);
15057 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15058 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);
15059
15060 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15062
15063 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15064 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15065 IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15066
15067 IEM_MC_PREPARE_FPU_USAGE();
15068 IEM_MC_IF_FPUREG_IS_EMPTY(7)
15069 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
15070 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15071 IEM_MC_ELSE()
15072 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15073 IEM_MC_ENDIF();
15074 IEM_MC_ADVANCE_RIP();
15075
15076 IEM_MC_END();
15077 return VINF_SUCCESS;
15078}
15079
15080
15081/** Opcode 0xdb !11/1. */
15082FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
15083{
15084 IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
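    /* FISTTP (introduced with SSE3) always converts with truncation, i.e. it
       ignores the FCW.RC rounding control. */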
15085 IEM_MC_BEGIN(3, 2);
15086 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15087 IEM_MC_LOCAL(uint16_t, u16Fsw);
15088 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15089 IEM_MC_ARG(int32_t *, pi32Dst, 1);
15090 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15091
15092 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15094 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15095 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15096
15097 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15098 IEM_MC_PREPARE_FPU_USAGE();
15099 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15100 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
15101 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
15102 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15103 IEM_MC_ELSE()
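        /* Masked invalid-operation response: store the 32-bit integer
           indefinite and commit it; with FCW.IM clear, memory is left
           untouched and the exception is signalled instead. */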
15104 IEM_MC_IF_FCW_IM()
15105 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
15106 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
15107 IEM_MC_ENDIF();
15108 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15109 IEM_MC_ENDIF();
15110 IEM_MC_ADVANCE_RIP();
15111
15112 IEM_MC_END();
15113 return VINF_SUCCESS;
15114}
15115
15116
15117/** Opcode 0xdb !11/2. */
15118FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
15119{
15120 IEMOP_MNEMONIC(fist_m32i, "fist m32i");
15121 IEM_MC_BEGIN(3, 2);
15122 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15123 IEM_MC_LOCAL(uint16_t, u16Fsw);
15124 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15125 IEM_MC_ARG(int32_t *, pi32Dst, 1);
15126 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15127
15128 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15130 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15131 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15132
15133 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15134 IEM_MC_PREPARE_FPU_USAGE();
15135 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15136 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
15137 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
15138 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15139 IEM_MC_ELSE()
15140 IEM_MC_IF_FCW_IM()
15141 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
15142 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
15143 IEM_MC_ENDIF();
15144 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15145 IEM_MC_ENDIF();
15146 IEM_MC_ADVANCE_RIP();
15147
15148 IEM_MC_END();
15149 return VINF_SUCCESS;
15150}
15151
15152
15153/** Opcode 0xdb !11/3. */
15154FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
15155{
15156 IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
15157 IEM_MC_BEGIN(3, 2);
15158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15159 IEM_MC_LOCAL(uint16_t, u16Fsw);
15160 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15161 IEM_MC_ARG(int32_t *, pi32Dst, 1);
15162 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15163
15164 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15166 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15167 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15168
15169 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15170 IEM_MC_PREPARE_FPU_USAGE();
15171 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15172 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
15173 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
15174 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15175 IEM_MC_ELSE()
15176 IEM_MC_IF_FCW_IM()
15177 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
15178 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
15179 IEM_MC_ENDIF();
15180 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15181 IEM_MC_ENDIF();
15182 IEM_MC_ADVANCE_RIP();
15183
15184 IEM_MC_END();
15185 return VINF_SUCCESS;
15186}
15187
15188
15189/** Opcode 0xdb !11/5. */
15190FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
15191{
15192 IEMOP_MNEMONIC(fld_m80r, "fld m80r");
15193
15194 IEM_MC_BEGIN(2, 3);
15195 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15196 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15197 IEM_MC_LOCAL(RTFLOAT80U, r80Val);
15198 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15199 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);
15200
15201 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15203
15204 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15205 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15206 IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15207
15208 IEM_MC_PREPARE_FPU_USAGE();
15209 IEM_MC_IF_FPUREG_IS_EMPTY(7)
15210 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
15211 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15212 IEM_MC_ELSE()
15213 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15214 IEM_MC_ENDIF();
15215 IEM_MC_ADVANCE_RIP();
15216
15217 IEM_MC_END();
15218 return VINF_SUCCESS;
15219}
15220
15221
15222/** Opcode 0xdb !11/7. */
15223FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
15224{
15225 IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
15226 IEM_MC_BEGIN(3, 2);
15227 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15228 IEM_MC_LOCAL(uint16_t, u16Fsw);
15229 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15230 IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
15231 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15232
15233 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15234 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15235 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15236 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15237
15238 IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15239 IEM_MC_PREPARE_FPU_USAGE();
15240 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15241 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
15242 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
15243 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15244 IEM_MC_ELSE()
15245 IEM_MC_IF_FCW_IM()
15246 IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
15247 IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
15248 IEM_MC_ENDIF();
15249 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15250 IEM_MC_ENDIF();
15251 IEM_MC_ADVANCE_RIP();
15252
15253 IEM_MC_END();
15254 return VINF_SUCCESS;
15255}
15256
15257
15258/** Opcode 0xdb 11/0. */
15259FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
15260{
15261 IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
15262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15263
15264 IEM_MC_BEGIN(0, 1);
15265 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15266
15267 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15268 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15269
15270 IEM_MC_PREPARE_FPU_USAGE();
15271 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15272 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
15273 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15274 IEM_MC_ENDIF();
15275 IEM_MC_UPDATE_FPU_OPCODE_IP();
15276 IEM_MC_ELSE()
15277 IEM_MC_FPU_STACK_UNDERFLOW(0);
15278 IEM_MC_ENDIF();
15279 IEM_MC_ADVANCE_RIP();
15280
15281 IEM_MC_END();
15282 return VINF_SUCCESS;
15283}
15284
15285
15286/** Opcode 0xdb 11/1. */
15287FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
15288{
15289 IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
15290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15291
15292 IEM_MC_BEGIN(0, 1);
15293 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15294
15295 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15296 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15297
15298 IEM_MC_PREPARE_FPU_USAGE();
15299 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15300 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
15301 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15302 IEM_MC_ENDIF();
15303 IEM_MC_UPDATE_FPU_OPCODE_IP();
15304 IEM_MC_ELSE()
15305 IEM_MC_FPU_STACK_UNDERFLOW(0);
15306 IEM_MC_ENDIF();
15307 IEM_MC_ADVANCE_RIP();
15308
15309 IEM_MC_END();
15310 return VINF_SUCCESS;
15311}
15312
15313
15314/** Opcode 0xdb 11/2. */
15315FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
15316{
15317 IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
15318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15319
15320 IEM_MC_BEGIN(0, 1);
15321 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15322
15323 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15324 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15325
15326 IEM_MC_PREPARE_FPU_USAGE();
15327 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15328 IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
15329 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15330 IEM_MC_ENDIF();
15331 IEM_MC_UPDATE_FPU_OPCODE_IP();
15332 IEM_MC_ELSE()
15333 IEM_MC_FPU_STACK_UNDERFLOW(0);
15334 IEM_MC_ENDIF();
15335 IEM_MC_ADVANCE_RIP();
15336
15337 IEM_MC_END();
15338 return VINF_SUCCESS;
15339}
15340
15341
15342/** Opcode 0xdb 11/3. */
15343FNIEMOP_DEF_1(iemOp_fcmovnu_stN, uint8_t, bRm)
15344{
15345 IEMOP_MNEMONIC(fcmovnu_st0_stN, "fcmovnu st0,stN");
15346 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15347
15348 IEM_MC_BEGIN(0, 1);
15349 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15350
15351 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15352 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15353
15354 IEM_MC_PREPARE_FPU_USAGE();
15355 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15356 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
15357 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15358 IEM_MC_ENDIF();
15359 IEM_MC_UPDATE_FPU_OPCODE_IP();
15360 IEM_MC_ELSE()
15361 IEM_MC_FPU_STACK_UNDERFLOW(0);
15362 IEM_MC_ENDIF();
15363 IEM_MC_ADVANCE_RIP();
15364
15365 IEM_MC_END();
15366 return VINF_SUCCESS;
15367}
15368
15369
15370/** Opcode 0xdb 0xe0. */
15371FNIEMOP_DEF(iemOp_fneni)
15372{
15373 IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
15374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15375 IEM_MC_BEGIN(0,0);
15376 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15377 IEM_MC_ADVANCE_RIP();
15378 IEM_MC_END();
15379 return VINF_SUCCESS;
15380}
15381
15382
15383/** Opcode 0xdb 0xe1. */
15384FNIEMOP_DEF(iemOp_fndisi)
15385{
15386 IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
15387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15388 IEM_MC_BEGIN(0,0);
15389 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15390 IEM_MC_ADVANCE_RIP();
15391 IEM_MC_END();
15392 return VINF_SUCCESS;
15393}
15394
15395
15396/** Opcode 0xdb 0xe2. */
15397FNIEMOP_DEF(iemOp_fnclex)
15398{
15399 IEMOP_MNEMONIC(fnclex, "fnclex");
15400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15401
15402 IEM_MC_BEGIN(0,0);
15403 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15404 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15405 IEM_MC_CLEAR_FSW_EX();
15406 IEM_MC_ADVANCE_RIP();
15407 IEM_MC_END();
15408 return VINF_SUCCESS;
15409}
15410
15411
15412/** Opcode 0xdb 0xe3. */
15413FNIEMOP_DEF(iemOp_fninit)
15414{
15415 IEMOP_MNEMONIC(fninit, "fninit");
15416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15417 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
15418}
15419
15420
15421/** Opcode 0xdb 0xe4. */
15422FNIEMOP_DEF(iemOp_fnsetpm)
15423{
15424 IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
15425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15426 IEM_MC_BEGIN(0,0);
15427 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15428 IEM_MC_ADVANCE_RIP();
15429 IEM_MC_END();
15430 return VINF_SUCCESS;
15431}
15432
15433
15434/** Opcode 0xdb 0xe5. */
15435FNIEMOP_DEF(iemOp_frstpm)
15436{
15437 IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
15438#if 0 /* #UDs on newer CPUs */
15439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15440 IEM_MC_BEGIN(0,0);
15441 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15442 IEM_MC_ADVANCE_RIP();
15443 IEM_MC_END();
15444 return VINF_SUCCESS;
15445#else
15446 return IEMOP_RAISE_INVALID_OPCODE();
15447#endif
15448}
15449
15450
15451/** Opcode 0xdb 11/5. */
15452FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
15453{
15454 IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
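    /* FUCOMI/FCOMI (P6 and later) report the comparison result in
       EFLAGS.ZF/PF/CF instead of the FSW condition code bits. */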
15455 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
15456}
15457
15458
15459/** Opcode 0xdb 11/6. */
15460FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
15461{
15462 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
15463 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
15464}
15465
15466
15467/** Opcode 0xdb. */
15468FNIEMOP_DEF(iemOp_EscF3)
15469{
15470 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15471 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
15472 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15473 {
15474 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15475 {
15476 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
15477 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
15478 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
15479 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnu_stN, bRm);
15480 case 4:
15481 switch (bRm)
15482 {
15483 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
15484 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
15485 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
15486 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
15487 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
15488 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
15489 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
15490 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
15491 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15492 }
15493 break;
15494 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
15495 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
15496 case 7: return IEMOP_RAISE_INVALID_OPCODE();
15497 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15498 }
15499 }
15500 else
15501 {
15502 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15503 {
15504 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
15505 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i, bRm);
15506 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
15507 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
15508 case 4: return IEMOP_RAISE_INVALID_OPCODE();
15509 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
15510 case 6: return IEMOP_RAISE_INVALID_OPCODE();
15511 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
15512 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15513 }
15514 }
15515}
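

/*
 * Illustrative sketch (not part of the build, plain ISO C with ad-hoc names):
 * how dispatchers like the one above carve up the ModRM byte.  Mod 11b means a
 * register operand; reg acts as the /digit opcode extension; rm selects ST(i)
 * in register form.  The X86_MODRM_* masks encode the same bit layout.
 */
#if 0
# include <stdint.h>
# include <stdio.h>

static void DecodeModRmExample(uint8_t bRm)
{
    unsigned const uMod = bRm >> 6;       /* bits 7:6 */
    unsigned const uReg = (bRm >> 3) & 7; /* bits 5:3 */
    unsigned const uRm  = bRm & 7;        /* bits 2:0 */
    if (uMod == 3)
        printf("register form: /%u on ST(%u)\n", uReg, uRm);
    else
        printf("memory form: /%u with addressing mode mod=%u rm=%u\n", uReg, uMod, uRm);
}
#endif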
15516
15517
15518/**
15519 * Common worker for FPU instructions working on STn and ST0, and storing the
15520 * result in STn unless IE, DE or ZE was raised.
15521 *
15522 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15523 */
15524FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
15525{
15526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15527
15528 IEM_MC_BEGIN(3, 1);
15529 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15530 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15531 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15532 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
15533
15534 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15535 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15536
15537 IEM_MC_PREPARE_FPU_USAGE();
15538 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
15539 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
15540 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
15541 IEM_MC_ELSE()
15542 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
15543 IEM_MC_ENDIF();
15544 IEM_MC_ADVANCE_RIP();
15545
15546 IEM_MC_END();
15547 return VINF_SUCCESS;
15548}
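

/*
 * Illustrative sketch (not part of the build, ad-hoc names): the ST(i) numbers
 * the worker above operates on are relative to the FPU stack top, so the
 * physical x87 register touched is derived from the TOP field (FSW bits 13:11).
 */
#if 0
static unsigned PhysicalFpuRegExample(unsigned uTop /* FSW[13:11] */, unsigned iSt)
{
    return (uTop + iSt) & 7; /* eight registers, wrapping around */
}
#endif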
15549
15550
15551/** Opcode 0xdc 11/0. */
15552FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
15553{
15554 IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
15555 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
15556}
15557
15558
15559/** Opcode 0xdc 11/1. */
15560FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
15561{
15562 IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
15563 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
15564}
15565
15566
15567/** Opcode 0xdc 11/4. */
15568FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
15569{
15570 IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
15571 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
15572}
15573
15574
15575/** Opcode 0xdc 11/5. */
15576FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
15577{
15578 IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
15579 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
15580}
15581
15582
15583/** Opcode 0xdc 11/6. */
15584FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
15585{
15586 IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
15587 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
15588}
15589
15590
15591/** Opcode 0xdc 11/7. */
15592FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
15593{
15594 IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
15595 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
15596}
15597
15598
15599/**
15600 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
15601 * memory operand, and storing the result in ST0.
15602 *
15603 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15604 */
15605FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnAImpl)
15606{
15607 IEM_MC_BEGIN(3, 3);
15608 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15609 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15610 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
15611 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15612 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
15613 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
15614
15615 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15617 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15618 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15619
15620 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15621 IEM_MC_PREPARE_FPU_USAGE();
15622 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
15623 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Factor1, pr64Factor2);
15624 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15625 IEM_MC_ELSE()
15626 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15627 IEM_MC_ENDIF();
15628 IEM_MC_ADVANCE_RIP();
15629
15630 IEM_MC_END();
15631 return VINF_SUCCESS;
15632}
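

/*
 * Illustrative sketch (not part of the build, ad-hoc names): an m64r operand
 * widens to the 80-bit format without rounding (15-bit exponent, explicit
 * integer bit), which is why the worker above can fetch it up front and only
 * the stack-underflow case needs special handling.  The split below is what an
 * r64-to-r80 conversion has to inspect.
 */
#if 0
# include <stdint.h>

static void SplitR64Example(uint64_t u64)
{
    uint64_t const uFraction = u64 & UINT64_C(0xfffffffffffff); /* 52 bits */
    unsigned const uExponent = (unsigned)(u64 >> 52) & 0x7ff;   /* 11 bits, bias 1023 */
    unsigned const fSign     = (unsigned)(u64 >> 63);
    (void)uFraction; (void)uExponent; (void)fSign;
}
#endif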
15633
15634
15635/** Opcode 0xdc !11/0. */
15636FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
15637{
15638 IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
15639 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
15640}
15641
15642
15643/** Opcode 0xdc !11/1. */
15644FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
15645{
15646 IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
15647 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
15648}
15649
15650
15651/** Opcode 0xdc !11/2. */
15652FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
15653{
15654 IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");
15655
15656 IEM_MC_BEGIN(3, 3);
15657 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15658 IEM_MC_LOCAL(uint16_t, u16Fsw);
15659 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
15660 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15661 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15662 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
15663
15664 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15666
15667 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15668 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15669 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15670
15671 IEM_MC_PREPARE_FPU_USAGE();
15672 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15673 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
15674 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15675 IEM_MC_ELSE()
15676 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15677 IEM_MC_ENDIF();
15678 IEM_MC_ADVANCE_RIP();
15679
15680 IEM_MC_END();
15681 return VINF_SUCCESS;
15682}
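

/*
 * Illustrative sketch (not part of the build, ad-hoc names): the condition
 * codes an FCOM-style compare leaves in the status word per the manuals - C0
 * is FSW bit 8, C2 bit 10, C3 bit 14; C1 is cleared.
 */
#if 0
# include <stdint.h>

static uint16_t FcomFswExample(long double lrd1, long double lrd2)
{
    if (lrd1 != lrd1 || lrd2 != lrd2)            /* unordered (NaN) */
        return (1 << 14) | (1 << 10) | (1 << 8); /* C3=C2=C0=1 */
    if (lrd1 > lrd2)
        return 0;                                /* C3=C2=C0=0 */
    if (lrd1 < lrd2)
        return 1 << 8;                           /* C0=1 */
    return 1 << 14;                              /* equal: C3=1 */
}
#endif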
15683
15684
15685/** Opcode 0xdc !11/3. */
15686FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
15687{
15688 IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");
15689
15690 IEM_MC_BEGIN(3, 3);
15691 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15692 IEM_MC_LOCAL(uint16_t, u16Fsw);
15693 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
15694 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15695 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15696 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
15697
15698 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15700
15701 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15702 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15703 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15704
15705 IEM_MC_PREPARE_FPU_USAGE();
15706 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15707 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
15708 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15709 IEM_MC_ELSE()
15710 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15711 IEM_MC_ENDIF();
15712 IEM_MC_ADVANCE_RIP();
15713
15714 IEM_MC_END();
15715 return VINF_SUCCESS;
15716}
15717
15718
15719/** Opcode 0xdc !11/4. */
15720FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
15721{
15722 IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
15723 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
15724}
15725
15726
15727/** Opcode 0xdc !11/5. */
15728FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
15729{
15730 IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
15731 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
15732}
15733
15734
15735/** Opcode 0xdc !11/6. */
15736FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
15737{
15738 IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
15739 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
15740}
15741
15742
15743/** Opcode 0xdc !11/7. */
15744FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
15745{
15746 IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
15747 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
15748}
15749
15750
15751/** Opcode 0xdc. */
15752FNIEMOP_DEF(iemOp_EscF4)
15753{
15754 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15755 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
15756 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15757 {
15758 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15759 {
15760 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
15761 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
15762 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
15763 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
15764 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
15765 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
15766 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
15767 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
15768 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15769 }
15770 }
15771 else
15772 {
15773 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15774 {
15775 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
15776 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
15777 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
15778 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
15779 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
15780 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
15781 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
15782 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
15783 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15784 }
15785 }
15786}
15787
15788
15789/** Opcode 0xdd !11/0.
15790 * @sa iemOp_fld_m32r */
15791FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
15792{
15793 IEMOP_MNEMONIC(fld_m64r, "fld m64r");
15794
15795 IEM_MC_BEGIN(2, 3);
15796 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15797 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15798 IEM_MC_LOCAL(RTFLOAT64U, r64Val);
15799 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15800 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);
15801
15802 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15804 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15805 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15806
15807 IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15808 IEM_MC_PREPARE_FPU_USAGE();
15809 IEM_MC_IF_FPUREG_IS_EMPTY(7)
15810 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
15811 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15812 IEM_MC_ELSE()
15813 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15814 IEM_MC_ENDIF();
15815 IEM_MC_ADVANCE_RIP();
15816
15817 IEM_MC_END();
15818 return VINF_SUCCESS;
15819}
15820
15821
15822/** Opcode 0xdd !11/1. */
15823FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
15824{
15825 IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
15826 IEM_MC_BEGIN(3, 2);
15827 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15828 IEM_MC_LOCAL(uint16_t, u16Fsw);
15829 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15830 IEM_MC_ARG(int64_t *, pi64Dst, 1);
15831 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15832
15833 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15835 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15836 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15837
15838 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15839 IEM_MC_PREPARE_FPU_USAGE();
15840 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15841 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
15842 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
15843 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15844 IEM_MC_ELSE()
15845 IEM_MC_IF_FCW_IM()
15846 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
15847 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
15848 IEM_MC_ENDIF();
15849 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15850 IEM_MC_ENDIF();
15851 IEM_MC_ADVANCE_RIP();
15852
15853 IEM_MC_END();
15854 return VINF_SUCCESS;
15855}
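

/*
 * Illustrative sketch (not part of the build, ad-hoc names): FISTTP always
 * chops toward zero regardless of FCW.RC, and a masked invalid operation
 * stores the integer indefinite - the same INT64_MIN constant written above
 * on the masked-IM path.
 */
#if 0
# include <stdint.h>
# include <math.h>

static int64_t FisttpExample(long double lrdSrc)
{
    if (isnan(lrdSrc) || lrdSrc >= 0x1p63L || lrdSrc < -0x1p63L)
        return INT64_MIN;           /* 0x8000000000000000, integer indefinite */
    return (int64_t)truncl(lrdSrc); /* truncate, ignoring the rounding control */
}
#endif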
15856
15857
15858/** Opcode 0xdd !11/2. */
15859FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
15860{
15861 IEMOP_MNEMONIC(fst_m64r, "fst m64r");
15862 IEM_MC_BEGIN(3, 2);
15863 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15864 IEM_MC_LOCAL(uint16_t, u16Fsw);
15865 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15866 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
15867 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15868
15869 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15870 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15871 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15872 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15873
15874 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15875 IEM_MC_PREPARE_FPU_USAGE();
15876 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15877 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
15878 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
15879 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15880 IEM_MC_ELSE()
15881 IEM_MC_IF_FCW_IM()
15882 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
15883 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
15884 IEM_MC_ENDIF();
15885 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15886 IEM_MC_ENDIF();
15887 IEM_MC_ADVANCE_RIP();
15888
15889 IEM_MC_END();
15890 return VINF_SUCCESS;
15891}
15892
15893
15894
15895
15896/** Opcode 0xdd !11/3. */
15897FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
15898{
15899 IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
15900 IEM_MC_BEGIN(3, 2);
15901 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15902 IEM_MC_LOCAL(uint16_t, u16Fsw);
15903 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15904 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
15905 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15906
15907 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15909 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15910 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15911
15912 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15913 IEM_MC_PREPARE_FPU_USAGE();
15914 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15915 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
15916 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
15917 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15918 IEM_MC_ELSE()
15919 IEM_MC_IF_FCW_IM()
15920 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
15921 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
15922 IEM_MC_ENDIF();
15923 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15924 IEM_MC_ENDIF();
15925 IEM_MC_ADVANCE_RIP();
15926
15927 IEM_MC_END();
15928 return VINF_SUCCESS;
15929}
15930
15931
15932/** Opcode 0xdd !11/4. */
15933FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
15934{
15935 IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
15936 IEM_MC_BEGIN(3, 0);
15937 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
15938 IEM_MC_ARG(uint8_t, iEffSeg, 1);
15939 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
15940 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15941 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15942 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15943 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15944 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
15945 IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
15946 IEM_MC_END();
15947 return VINF_SUCCESS;
15948}
15949
15950
15951/** Opcode 0xdd !11/6. */
15952FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
15953{
15954 IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
15955 IEM_MC_BEGIN(3, 0);
15956 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
15957 IEM_MC_ARG(uint8_t, iEffSeg, 1);
15958 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
15959 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15961 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15962 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
15963 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
15964 IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
15965 IEM_MC_END();
15966 return VINF_SUCCESS;
15967}
15968
15969
15970/** Opcode 0xdd !11/7. */
15971FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
15972{
15973 IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");
15974
15975 IEM_MC_BEGIN(0, 2);
15976 IEM_MC_LOCAL(uint16_t, u16Tmp);
15977 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15978
15979 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15981 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15982
15983 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
15984 IEM_MC_FETCH_FSW(u16Tmp);
15985 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
15986 IEM_MC_ADVANCE_RIP();
15987
15988/** @todo Debug / drop a hint to the verifier that things may differ
15989 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
15990 * NT4SP1. (X86_FSW_PE) */
15991 IEM_MC_END();
15992 return VINF_SUCCESS;
15993}
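

/*
 * Illustrative sketch (not part of the build, ad-hoc names): the layout of the
 * 16-bit status word stored above.  The exception flags occupy bits 7:0 (PE,
 * the one named in the @todo note, is bit 5), TOP sits in bits 13:11, and the
 * condition codes in bits 8, 9, 10 and 14.
 */
#if 0
# include <stdint.h>

static void SplitFswExample(uint16_t u16Fsw)
{
    unsigned const fPE  = (u16Fsw >> 5) & 1;  /* precision exception */
    unsigned const uTop = (u16Fsw >> 11) & 7; /* current stack top */
    unsigned const fC3  = (u16Fsw >> 14) & 1; /* condition code 3 */
    (void)fPE; (void)uTop; (void)fC3;
}
#endif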
15994
15995
15996/** Opcode 0xdd 11/0. */
15997FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
15998{
15999 IEMOP_MNEMONIC(ffree_stN, "ffree stN");
16000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16001 /* Note! C0, C1, C2 and C3 are documented as undefined, so we leave
16002 them unmodified. */
16003
16004 IEM_MC_BEGIN(0, 0);
16005
16006 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16007 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16008
16009 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
16010 IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
16011 IEM_MC_UPDATE_FPU_OPCODE_IP();
16012
16013 IEM_MC_ADVANCE_RIP();
16014 IEM_MC_END();
16015 return VINF_SUCCESS;
16016}
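

/*
 * Illustrative sketch (not part of the build, ad-hoc names): freeing a
 * register only flips its two bits in the architectural tag word to 11b
 * (empty); the register contents stay put, which is why the code above
 * touches nothing but the tag.
 */
#if 0
# include <stdint.h>

static uint16_t FfreeTagExample(uint16_t u16Ftw, unsigned iPhysReg)
{
    return u16Ftw | (uint16_t)(3 << (iPhysReg * 2)); /* 11b = empty */
}
#endif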
16017
16018
16019/** Opcode 0xdd 11/2. */
16020FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
16021{
16022 IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
16023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16024
16025 IEM_MC_BEGIN(0, 2);
16026 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
16027 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16028 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16029 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16030
16031 IEM_MC_PREPARE_FPU_USAGE();
16032 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16033 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
16034 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
16035 IEM_MC_ELSE()
16036 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
16037 IEM_MC_ENDIF();
16038
16039 IEM_MC_ADVANCE_RIP();
16040 IEM_MC_END();
16041 return VINF_SUCCESS;
16042}
16043
16044
16045/** Opcode 0xdd 11/4. */
16046FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
16047{
16048 IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
16049 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
16050}
16051
16052
16053/** Opcode 0xdd 11/5. */
16054FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
16055{
16056 IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
16057 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
16058}
16059
16060
16061/** Opcode 0xdd. */
16062FNIEMOP_DEF(iemOp_EscF5)
16063{
16064 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16065 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
16066 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16067 {
16068 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16069 {
16070 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
16071 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
16072 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
16073 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
16074 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0, bRm);
16075 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
16076 case 6: return IEMOP_RAISE_INVALID_OPCODE();
16077 case 7: return IEMOP_RAISE_INVALID_OPCODE();
16078 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16079 }
16080 }
16081 else
16082 {
16083 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16084 {
16085 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
16086 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
16087 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
16088 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
16089 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
16090 case 5: return IEMOP_RAISE_INVALID_OPCODE();
16091 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
16092 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
16093 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16094 }
16095 }
16096}
16097
16098
16099/** Opcode 0xde 11/0. */
16100FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
16101{
16102 IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
16103 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
16104}
16105
16106
16107/** Opcode 0xde 11/1. */
16108FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
16109{
16110 IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
16111 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
16112}
16113
16114
16115/** Opcode 0xde 0xd9. */
16116FNIEMOP_DEF(iemOp_fcompp)
16117{
16118 IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
16119 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
16120}
16121
16122
16123/** Opcode 0xde 11/4. */
16124FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
16125{
16126 IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
16127 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
16128}
16129
16130
16131/** Opcode 0xde 11/5. */
16132FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
16133{
16134 IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
16135 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
16136}
16137
16138
16139/** Opcode 0xde 11/6. */
16140FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
16141{
16142 IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
16143 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
16144}
16145
16146
16147/** Opcode 0xde 11/7. */
16148FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
16149{
16150 IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
16151 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
16152}
16153
16154
16155/**
16156 * Common worker for FPU instructions working on ST0 and an m16i, and storing
16157 * the result in ST0.
16158 *
16159 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16160 */
16161FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
16162{
16163 IEM_MC_BEGIN(3, 3);
16164 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16165 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16166 IEM_MC_LOCAL(int16_t, i16Val2);
16167 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16168 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16169 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16170
16171 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16173
16174 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16175 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16176 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16177
16178 IEM_MC_PREPARE_FPU_USAGE();
16179 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16180 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
16181 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
16182 IEM_MC_ELSE()
16183 IEM_MC_FPU_STACK_UNDERFLOW(0);
16184 IEM_MC_ENDIF();
16185 IEM_MC_ADVANCE_RIP();
16186
16187 IEM_MC_END();
16188 return VINF_SUCCESS;
16189}
16190
16191
16192/** Opcode 0xde !11/0. */
16193FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
16194{
16195 IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
16196 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
16197}
16198
16199
16200/** Opcode 0xde !11/1. */
16201FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
16202{
16203 IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
16204 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
16205}
16206
16207
16208/** Opcode 0xde !11/2. */
16209FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
16210{
16211 IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");
16212
16213 IEM_MC_BEGIN(3, 3);
16214 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16215 IEM_MC_LOCAL(uint16_t, u16Fsw);
16216 IEM_MC_LOCAL(int16_t, i16Val2);
16217 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16218 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16219 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16220
16221 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16223
16224 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16225 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16226 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16227
16228 IEM_MC_PREPARE_FPU_USAGE();
16229 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16230 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
16231 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16232 IEM_MC_ELSE()
16233 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16234 IEM_MC_ENDIF();
16235 IEM_MC_ADVANCE_RIP();
16236
16237 IEM_MC_END();
16238 return VINF_SUCCESS;
16239}
16240
16241
16242/** Opcode 0xde !11/3. */
16243FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
16244{
16245 IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");
16246
16247 IEM_MC_BEGIN(3, 3);
16248 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16249 IEM_MC_LOCAL(uint16_t, u16Fsw);
16250 IEM_MC_LOCAL(int16_t, i16Val2);
16251 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16252 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16253 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16254
16255 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16257
16258 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16259 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16260 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16261
16262 IEM_MC_PREPARE_FPU_USAGE();
16263 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16264 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
16265 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16266 IEM_MC_ELSE()
16267 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16268 IEM_MC_ENDIF();
16269 IEM_MC_ADVANCE_RIP();
16270
16271 IEM_MC_END();
16272 return VINF_SUCCESS;
16273}
16274
16275
16276/** Opcode 0xde !11/4. */
16277FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
16278{
16279 IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
16280 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
16281}
16282
16283
16284/** Opcode 0xde !11/5. */
16285FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
16286{
16287 IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
16288 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
16289}
16290
16291
16292/** Opcode 0xde !11/6. */
16293FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
16294{
16295 IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
16296 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
16297}
16298
16299
16300/** Opcode 0xde !11/7. */
16301FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
16302{
16303 IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
16304 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
16305}
16306
16307
16308/** Opcode 0xde. */
16309FNIEMOP_DEF(iemOp_EscF6)
16310{
16311 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16312 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
16313 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16314 {
16315 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16316 {
16317 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
16318 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
16319 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
16320 case 3: if (bRm == 0xd9)
16321 return FNIEMOP_CALL(iemOp_fcompp);
16322 return IEMOP_RAISE_INVALID_OPCODE();
16323 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
16324 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
16325 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
16326 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
16327 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16328 }
16329 }
16330 else
16331 {
16332 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16333 {
16334 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
16335 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
16336 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
16337 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
16338 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
16339 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
16340 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
16341 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
16342 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16343 }
16344 }
16345}
16346
16347
16348/** Opcode 0xdf 11/0.
16349 * Undocumented instruction, assumed to work like ffree + fincstp. */
16350FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
16351{
16352 IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
16353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16354
16355 IEM_MC_BEGIN(0, 0);
16356
16357 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16358 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16359
16360 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
16361 IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
16362 IEM_MC_FPU_STACK_INC_TOP();
16363 IEM_MC_UPDATE_FPU_OPCODE_IP();
16364
16365 IEM_MC_ADVANCE_RIP();
16366 IEM_MC_END();
16367 return VINF_SUCCESS;
16368}
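

/*
 * Illustrative sketch (not part of the build, ad-hoc names): the two steps the
 * code above combines - free ST(i), then pop the stack by incrementing the TOP
 * field in the status word.
 */
#if 0
# include <stdint.h>

static void FfreepExample(uint16_t *pu16Ftw, uint16_t *pu16Fsw, unsigned iSt)
{
    unsigned const uTop = (*pu16Fsw >> 11) & 7;
    *pu16Ftw |= (uint16_t)(3 << (((uTop + iSt) & 7) * 2)); /* tag ST(iSt) empty */
    *pu16Fsw  = (uint16_t)(  (*pu16Fsw & ~(7 << 11))
                           | (((uTop + 1) & 7) << 11));    /* TOP += 1 (pop) */
}
#endif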
16369
16370
16371/** Opcode 0xdf 0xe0. */
16372FNIEMOP_DEF(iemOp_fnstsw_ax)
16373{
16374 IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
16375 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16376
16377 IEM_MC_BEGIN(0, 1);
16378 IEM_MC_LOCAL(uint16_t, u16Tmp);
16379 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16380 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
16381 IEM_MC_FETCH_FSW(u16Tmp);
16382 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
16383 IEM_MC_ADVANCE_RIP();
16384 IEM_MC_END();
16385 return VINF_SUCCESS;
16386}
16387
16388
16389/** Opcode 0xdf 11/5. */
16390FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
16391{
16392 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
16393 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, true /*fPop*/);
16394}
16395
16396
16397/** Opcode 0xdf 11/6. */
16398FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
16399{
16400 IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
16401 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
16402}
16403
16404
16405/** Opcode 0xdf !11/0. */
16406FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
16407{
16408 IEMOP_MNEMONIC(fild_m16i, "fild m16i");
16409
16410 IEM_MC_BEGIN(2, 3);
16411 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16412 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16413 IEM_MC_LOCAL(int16_t, i16Val);
16414 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16415 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);
16416
16417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16419
16420 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16421 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16422 IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16423
16424 IEM_MC_PREPARE_FPU_USAGE();
16425 IEM_MC_IF_FPUREG_IS_EMPTY(7)
16426 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
16427 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16428 IEM_MC_ELSE()
16429 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16430 IEM_MC_ENDIF();
16431 IEM_MC_ADVANCE_RIP();
16432
16433 IEM_MC_END();
16434 return VINF_SUCCESS;
16435}
16436
16437
16438/** Opcode 0xdf !11/1. */
16439FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
16440{
16441 IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
16442 IEM_MC_BEGIN(3, 2);
16443 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16444 IEM_MC_LOCAL(uint16_t, u16Fsw);
16445 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16446 IEM_MC_ARG(int16_t *, pi16Dst, 1);
16447 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16448
16449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16451 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16452 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16453
16454 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16455 IEM_MC_PREPARE_FPU_USAGE();
16456 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16457 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
16458 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
16459 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16460 IEM_MC_ELSE()
16461 IEM_MC_IF_FCW_IM()
16462 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
16463 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
16464 IEM_MC_ENDIF();
16465 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16466 IEM_MC_ENDIF();
16467 IEM_MC_ADVANCE_RIP();
16468
16469 IEM_MC_END();
16470 return VINF_SUCCESS;
16471}
16472
16473
16474/** Opcode 0xdf !11/2. */
16475FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
16476{
16477 IEMOP_MNEMONIC(fist_m16i, "fist m16i");
16478 IEM_MC_BEGIN(3, 2);
16479 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16480 IEM_MC_LOCAL(uint16_t, u16Fsw);
16481 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16482 IEM_MC_ARG(int16_t *, pi16Dst, 1);
16483 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16484
16485 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16487 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16488 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16489
16490 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16491 IEM_MC_PREPARE_FPU_USAGE();
16492 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16493 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
16494 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
16495 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16496 IEM_MC_ELSE()
16497 IEM_MC_IF_FCW_IM()
16498 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
16499 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
16500 IEM_MC_ENDIF();
16501 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16502 IEM_MC_ENDIF();
16503 IEM_MC_ADVANCE_RIP();
16504
16505 IEM_MC_END();
16506 return VINF_SUCCESS;
16507}
16508
16509
16510/** Opcode 0xdf !11/3. */
16511FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
16512{
16513 IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
16514 IEM_MC_BEGIN(3, 2);
16515 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16516 IEM_MC_LOCAL(uint16_t, u16Fsw);
16517 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16518 IEM_MC_ARG(int16_t *, pi16Dst, 1);
16519 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16520
16521 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16523 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16524 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16525
16526 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16527 IEM_MC_PREPARE_FPU_USAGE();
16528 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16529 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
16530 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
16531 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16532 IEM_MC_ELSE()
16533 IEM_MC_IF_FCW_IM()
16534 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
16535 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
16536 IEM_MC_ENDIF();
16537 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16538 IEM_MC_ENDIF();
16539 IEM_MC_ADVANCE_RIP();
16540
16541 IEM_MC_END();
16542 return VINF_SUCCESS;
16543}
16544
16545
16546/** Opcode 0xdf !11/4. */
16547FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
16548
16549
16550/** Opcode 0xdf !11/5. */
16551FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
16552{
16553 IEMOP_MNEMONIC(fild_m64i, "fild m64i");
16554
16555 IEM_MC_BEGIN(2, 3);
16556 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16557 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16558 IEM_MC_LOCAL(int64_t, i64Val);
16559 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16560 IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);
16561
16562 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16564
16565 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16566 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16567 IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16568
16569 IEM_MC_PREPARE_FPU_USAGE();
16570 IEM_MC_IF_FPUREG_IS_EMPTY(7)
16571 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
16572 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16573 IEM_MC_ELSE()
16574 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16575 IEM_MC_ENDIF();
16576 IEM_MC_ADVANCE_RIP();
16577
16578 IEM_MC_END();
16579 return VINF_SUCCESS;
16580}
16581
16582
16583/** Opcode 0xdf !11/6. */
16584FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
16585
16586
16587/** Opcode 0xdf !11/7. */
16588FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
16589{
16590 IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
16591 IEM_MC_BEGIN(3, 2);
16592 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16593 IEM_MC_LOCAL(uint16_t, u16Fsw);
16594 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16595 IEM_MC_ARG(int64_t *, pi64Dst, 1);
16596 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16597
16598 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16600 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16601 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16602
16603 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16604 IEM_MC_PREPARE_FPU_USAGE();
16605 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16606 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
16607 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
16608 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16609 IEM_MC_ELSE()
16610 IEM_MC_IF_FCW_IM()
16611 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
16612 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
16613 IEM_MC_ENDIF();
16614 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16615 IEM_MC_ENDIF();
16616 IEM_MC_ADVANCE_RIP();
16617
16618 IEM_MC_END();
16619 return VINF_SUCCESS;
16620}
16621
16622
16623/** Opcode 0xdf. */
16624FNIEMOP_DEF(iemOp_EscF7)
16625{
16626 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
      pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
16627 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16628 {
16629 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16630 {
16631 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
16632 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
16633 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
16634 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
16635 case 4: if (bRm == 0xe0)
16636 return FNIEMOP_CALL(iemOp_fnstsw_ax);
16637 return IEMOP_RAISE_INVALID_OPCODE();
16638 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
16639 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
16640 case 7: return IEMOP_RAISE_INVALID_OPCODE();
16641 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16642 }
16643 }
16644 else
16645 {
16646 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16647 {
16648 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
16649 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
16650 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
16651 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
16652 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
16653 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
16654 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
16655 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
16656 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16657 }
16658 }
16659}
16660
16661
16662/** Opcode 0xe0. */
16663FNIEMOP_DEF(iemOp_loopne_Jb)
16664{
16665 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
16666 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
16667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16668 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16669
16670 switch (pVCpu->iem.s.enmEffAddrMode)
16671 {
16672 case IEMMODE_16BIT:
16673 IEM_MC_BEGIN(0,0);
16674 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
16675 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
16676 IEM_MC_REL_JMP_S8(i8Imm);
16677 } IEM_MC_ELSE() {
16678 IEM_MC_ADVANCE_RIP();
16679 } IEM_MC_ENDIF();
16680 IEM_MC_END();
16681 return VINF_SUCCESS;
16682
16683 case IEMMODE_32BIT:
16684 IEM_MC_BEGIN(0,0);
16685 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
16686 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
16687 IEM_MC_REL_JMP_S8(i8Imm);
16688 } IEM_MC_ELSE() {
16689 IEM_MC_ADVANCE_RIP();
16690 } IEM_MC_ENDIF();
16691 IEM_MC_END();
16692 return VINF_SUCCESS;
16693
16694 case IEMMODE_64BIT:
16695 IEM_MC_BEGIN(0,0);
16696 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
16697 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
16698 IEM_MC_REL_JMP_S8(i8Imm);
16699 } IEM_MC_ELSE() {
16700 IEM_MC_ADVANCE_RIP();
16701 } IEM_MC_ENDIF();
16702 IEM_MC_END();
16703 return VINF_SUCCESS;
16704
16705 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16706 }
16707}
16708
16709
16710/** Opcode 0xe1. */
16711FNIEMOP_DEF(iemOp_loope_Jb)
16712{
16713 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
16714 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
16715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16716 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16717
16718 switch (pVCpu->iem.s.enmEffAddrMode)
16719 {
16720 case IEMMODE_16BIT:
16721 IEM_MC_BEGIN(0,0);
16722 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
16723 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
16724 IEM_MC_REL_JMP_S8(i8Imm);
16725 } IEM_MC_ELSE() {
16726 IEM_MC_ADVANCE_RIP();
16727 } IEM_MC_ENDIF();
16728 IEM_MC_END();
16729 return VINF_SUCCESS;
16730
16731 case IEMMODE_32BIT:
16732 IEM_MC_BEGIN(0,0);
16733 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
16734 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
16735 IEM_MC_REL_JMP_S8(i8Imm);
16736 } IEM_MC_ELSE() {
16737 IEM_MC_ADVANCE_RIP();
16738 } IEM_MC_ENDIF();
16739 IEM_MC_END();
16740 return VINF_SUCCESS;
16741
16742 case IEMMODE_64BIT:
16743 IEM_MC_BEGIN(0,0);
16744 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
16745 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
16746 IEM_MC_REL_JMP_S8(i8Imm);
16747 } IEM_MC_ELSE() {
16748 IEM_MC_ADVANCE_RIP();
16749 } IEM_MC_ENDIF();
16750 IEM_MC_END();
16751 return VINF_SUCCESS;
16752
16753 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16754 }
16755}
16756
16757
16758/** Opcode 0xe2. */
16759FNIEMOP_DEF(iemOp_loop_Jb)
16760{
16761 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
16762 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
16763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16764 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16765
16766 /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
16767 * using the 32-bit operand size override. How can that be restarted? See
16768 * weird pseudo code in intel manual. */
16769 switch (pVCpu->iem.s.enmEffAddrMode)
16770 {
16771 case IEMMODE_16BIT:
16772 IEM_MC_BEGIN(0,0);
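            /* A branch back to the instruction itself ('loop $') would just
               spin decrementing the counter, so clear it and fall through
               instead of iterating. */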
16773 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
16774 {
16775 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
16776 IEM_MC_IF_CX_IS_NZ() {
16777 IEM_MC_REL_JMP_S8(i8Imm);
16778 } IEM_MC_ELSE() {
16779 IEM_MC_ADVANCE_RIP();
16780 } IEM_MC_ENDIF();
16781 }
16782 else
16783 {
16784 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
16785 IEM_MC_ADVANCE_RIP();
16786 }
16787 IEM_MC_END();
16788 return VINF_SUCCESS;
16789
16790 case IEMMODE_32BIT:
16791 IEM_MC_BEGIN(0,0);
16792 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
16793 {
16794 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
16795 IEM_MC_IF_ECX_IS_NZ() {
16796 IEM_MC_REL_JMP_S8(i8Imm);
16797 } IEM_MC_ELSE() {
16798 IEM_MC_ADVANCE_RIP();
16799 } IEM_MC_ENDIF();
16800 }
16801 else
16802 {
16803 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
16804 IEM_MC_ADVANCE_RIP();
16805 }
16806 IEM_MC_END();
16807 return VINF_SUCCESS;
16808
16809 case IEMMODE_64BIT:
16810 IEM_MC_BEGIN(0,0);
16811 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
16812 {
16813 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
16814 IEM_MC_IF_RCX_IS_NZ() {
16815 IEM_MC_REL_JMP_S8(i8Imm);
16816 } IEM_MC_ELSE() {
16817 IEM_MC_ADVANCE_RIP();
16818 } IEM_MC_ENDIF();
16819 }
16820 else
16821 {
16822 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
16823 IEM_MC_ADVANCE_RIP();
16824 }
16825 IEM_MC_END();
16826 return VINF_SUCCESS;
16827
16828 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16829 }
16830}
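

/*
 * Illustrative sketch (not part of the build, ad-hoc names): what the three
 * address-size cases above have in common.  LOOP decrements CX, ECX or RCX as
 * selected by the address size (writing ECX zero-extends into RCX), and the
 * LOOPE/LOOPNE variants add a ZF condition on top of the non-zero test.
 */
#if 0
# include <stdbool.h>
# include <stdint.h>

static bool LoopTakenExample(uint64_t *puRCx, unsigned cbAddr /* 2, 4 or 8 */,
                             int iZfCond /* 0=loop, +1=loope, -1=loopne */, bool fZF)
{
    uint64_t const fMask = cbAddr == 2 ? UINT16_MAX : cbAddr == 4 ? UINT32_MAX : UINT64_MAX;
    uint64_t const uCx   = (*puRCx - 1) & fMask;
    if (cbAddr == 4)
        *puRCx = uCx;                     /* 32-bit writes zero-extend */
    else
        *puRCx = (*puRCx & ~fMask) | uCx;
    return uCx != 0 && (iZfCond == 0 || (iZfCond > 0) == fZF);
}
#endif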
16831
16832
16833/** Opcode 0xe3. */
16834FNIEMOP_DEF(iemOp_jecxz_Jb)
16835{
16836 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
16837 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
16838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16839 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16840
16841 switch (pVCpu->iem.s.enmEffAddrMode)
16842 {
16843 case IEMMODE_16BIT:
16844 IEM_MC_BEGIN(0,0);
16845 IEM_MC_IF_CX_IS_NZ() {
16846 IEM_MC_ADVANCE_RIP();
16847 } IEM_MC_ELSE() {
16848 IEM_MC_REL_JMP_S8(i8Imm);
16849 } IEM_MC_ENDIF();
16850 IEM_MC_END();
16851 return VINF_SUCCESS;
16852
16853 case IEMMODE_32BIT:
16854 IEM_MC_BEGIN(0,0);
16855 IEM_MC_IF_ECX_IS_NZ() {
16856 IEM_MC_ADVANCE_RIP();
16857 } IEM_MC_ELSE() {
16858 IEM_MC_REL_JMP_S8(i8Imm);
16859 } IEM_MC_ENDIF();
16860 IEM_MC_END();
16861 return VINF_SUCCESS;
16862
16863 case IEMMODE_64BIT:
16864 IEM_MC_BEGIN(0,0);
16865 IEM_MC_IF_RCX_IS_NZ() {
16866 IEM_MC_ADVANCE_RIP();
16867 } IEM_MC_ELSE() {
16868 IEM_MC_REL_JMP_S8(i8Imm);
16869 } IEM_MC_ENDIF();
16870 IEM_MC_END();
16871 return VINF_SUCCESS;
16872
16873 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16874 }
16875}
16876
16877
16878/** Opcode 0xe4. */
16879FNIEMOP_DEF(iemOp_in_AL_Ib)
16880{
16881 IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
16882 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16884 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
16885}
16886
16887
16888/** Opcode 0xe5. */
16889FNIEMOP_DEF(iemOp_in_eAX_Ib)
16890{
16891 IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
16892 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16894 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
16895}
16896
16897
16898/** Opcode 0xe6. */
16899FNIEMOP_DEF(iemOp_out_Ib_AL)
16900{
16901 IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
16902 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16904 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
16905}
16906
16907
16908/** Opcode 0xe7. */
16909FNIEMOP_DEF(iemOp_out_Ib_eAX)
16910{
16911 IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
16912 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16914 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
16915}
16916
16917
16918/** Opcode 0xe8. */
16919FNIEMOP_DEF(iemOp_call_Jv)
16920{
16921 IEMOP_MNEMONIC(call_Jv, "call Jv");
16922 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16923 switch (pVCpu->iem.s.enmEffOpSize)
16924 {
16925 case IEMMODE_16BIT:
16926 {
16927 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
16928 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
16929 }
16930
16931 case IEMMODE_32BIT:
16932 {
16933 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
16934 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
16935 }
16936
16937 case IEMMODE_64BIT:
16938 {
16939 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
16940 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
16941 }
16942
16943 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16944 }
16945}
16946
16947
16948/** Opcode 0xe9. */
16949FNIEMOP_DEF(iemOp_jmp_Jv)
16950{
16951 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
16952 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16953 switch (pVCpu->iem.s.enmEffOpSize)
16954 {
16955 case IEMMODE_16BIT:
16956 {
16957 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
16958 IEM_MC_BEGIN(0, 0);
16959 IEM_MC_REL_JMP_S16(i16Imm);
16960 IEM_MC_END();
16961 return VINF_SUCCESS;
16962 }
16963
16964 case IEMMODE_64BIT:
16965 case IEMMODE_32BIT:
16966 {
16967 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
16968 IEM_MC_BEGIN(0, 0);
16969 IEM_MC_REL_JMP_S32(i32Imm);
16970 IEM_MC_END();
16971 return VINF_SUCCESS;
16972 }
16973
16974 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16975 }
16976}
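

/*
 * Illustrative sketch (not part of the build, ad-hoc names): there is no rel64
 * encoding, which is why the 64-bit case above shares the rel32 path - the
 * displacement is sign-extended onto the RIP of the next instruction.
 */
#if 0
# include <stdint.h>

static uint64_t RelJmpTargetExample(uint64_t uRipNext, int32_t i32Disp)
{
    return uRipNext + (uint64_t)(int64_t)i32Disp; /* sign-extended addition */
}
#endif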
16977
16978
16979/** Opcode 0xea. */
16980FNIEMOP_DEF(iemOp_jmp_Ap)
16981{
16982 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
16983 IEMOP_HLP_NO_64BIT();
16984
16985 /* Decode the far pointer address and pass it on to the far call C implementation. */
16986 uint32_t offSeg;
16987 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
16988 IEM_OPCODE_GET_NEXT_U32(&offSeg);
16989 else
16990 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
16991 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
16992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16993 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
16994}
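

/*
 * Illustrative sketch (not part of the build, ad-hoc names, assumes a
 * little-endian host): the byte layout behind the decoding above - the offset
 * comes first and the selector last, so a 16:32 far pointer encodes as
 * "EA <4-byte offset> <2-byte selector>".
 */
#if 0
# include <stdint.h>
# include <string.h>

static void DecodeFarPtr32Example(uint8_t const *pbInsn /* at the 0xea byte */,
                                  uint32_t *poffSeg, uint16_t *puSel)
{
    memcpy(poffSeg, pbInsn + 1, sizeof(*poffSeg));
    memcpy(puSel,   pbInsn + 5, sizeof(*puSel));
}
#endif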
16995
16996
16997/** Opcode 0xeb. */
16998FNIEMOP_DEF(iemOp_jmp_Jb)
16999{
17000 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
17001 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17003 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17004
17005 IEM_MC_BEGIN(0, 0);
17006 IEM_MC_REL_JMP_S8(i8Imm);
17007 IEM_MC_END();
17008 return VINF_SUCCESS;
17009}
17010
17011
17012/** Opcode 0xec. */
17013FNIEMOP_DEF(iemOp_in_AL_DX)
17014{
17015 IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
17016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17017 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
17018}
17019
17020
17021/** Opcode 0xed. */
17022FNIEMOP_DEF(iemOp_eAX_DX)
17023{
17024 IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
17025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17026 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17027}
17028
17029
17030/** Opcode 0xee. */
17031FNIEMOP_DEF(iemOp_out_DX_AL)
17032{
17033 IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
17034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17035 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
17036}
17037
17038
17040/** Opcode 0xef. */
17040FNIEMOP_DEF(iemOp_out_DX_eAX)
17041{
17042 IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
17043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17044 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17045}
17046
17047
17048/** Opcode 0xf0. */
17049FNIEMOP_DEF(iemOp_lock)
17050{
17051 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
17052 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
17053
17054 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17055 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17056}
17057
17058
17059/** Opcode 0xf1. */
17060FNIEMOP_DEF(iemOp_int_1)
17061{
17062 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
17063 IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
17064 /** @todo testcase! */
17065 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
17066}
17067
17068
17069/** Opcode 0xf2. */
17070FNIEMOP_DEF(iemOp_repne)
17071{
17072 /* This overrides any previous REPE prefix. */
17073 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
17074 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
17075 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
17076
17077 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17078 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17079}
17080
17081
17082/** Opcode 0xf3. */
17083FNIEMOP_DEF(iemOp_repe)
17084{
17085 /* This overrides any previous REPNE prefix. */
17086 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
17087 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
17088 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
17089
17090 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17091 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17092}
17093
17094
17095/** Opcode 0xf4. */
17096FNIEMOP_DEF(iemOp_hlt)
17097{
17098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17099 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
17100}
17101
17102
17103/** Opcode 0xf5. */
17104FNIEMOP_DEF(iemOp_cmc)
17105{
17106 IEMOP_MNEMONIC(cmc, "cmc");
17107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17108 IEM_MC_BEGIN(0, 0);
17109 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
17110 IEM_MC_ADVANCE_RIP();
17111 IEM_MC_END();
17112 return VINF_SUCCESS;
17113}
17114
17115
17116/**
17117 * Common implementation of 'inc/dec/not/neg Eb'.
17118 *
17119 * @param bRm The RM byte.
17120 * @param pImpl The instruction implementation.
17121 */
17122FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
17123{
17124 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17125 {
17126 /* register access */
17127 IEM_MC_BEGIN(2, 0);
17128 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17129 IEM_MC_ARG(uint32_t *, pEFlags, 1);
17130 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17131 IEM_MC_REF_EFLAGS(pEFlags);
17132 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
17133 IEM_MC_ADVANCE_RIP();
17134 IEM_MC_END();
17135 }
17136 else
17137 {
17138 /* memory access. */
17139 IEM_MC_BEGIN(2, 2);
17140 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17141 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17142 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17143
17144 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17145 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17146 IEM_MC_FETCH_EFLAGS(EFlags);
17147 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17148 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
17149 else
17150 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
17151
17152 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
17153 IEM_MC_COMMIT_EFLAGS(EFlags);
17154 IEM_MC_ADVANCE_RIP();
17155 IEM_MC_END();
17156 }
17157 return VINF_SUCCESS;
17158}
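

/*
 * Illustrative sketch (not part of the build, GCC/Clang builtins, ad-hoc
 * names): the guarantee the pfnLockedU8 path above must provide - the whole
 * read-modify-write is one atomic step, expressible portably as a
 * compare-and-swap loop.  NEG shown as the example operation.
 */
#if 0
# include <stdint.h>

static void LockedNeg8Example(volatile uint8_t *pu8Dst)
{
    uint8_t u8Old = __atomic_load_n(pu8Dst, __ATOMIC_RELAXED);
    while (!__atomic_compare_exchange_n(pu8Dst, &u8Old, (uint8_t)-u8Old,
                                        0 /*fWeak*/, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST))
    { /* u8Old was reloaded, retry */ }
}
#endif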
17159
17160
17161/**
17162 * Common implementation of 'inc/dec/not/neg Ev'.
17163 *
17164 * @param bRm The RM byte.
17165 * @param pImpl The instruction implementation.
17166 */
17167FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
17168{
17169 /* Registers are handled by a common worker. */
17170 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17171 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17172
17173 /* Memory we do here. */
17174 switch (pVCpu->iem.s.enmEffOpSize)
17175 {
17176 case IEMMODE_16BIT:
17177 IEM_MC_BEGIN(2, 2);
17178 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
17179 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17180 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17181
17182 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17183 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17184 IEM_MC_FETCH_EFLAGS(EFlags);
17185 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17186 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
17187 else
17188 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
17189
17190 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
17191 IEM_MC_COMMIT_EFLAGS(EFlags);
17192 IEM_MC_ADVANCE_RIP();
17193 IEM_MC_END();
17194 return VINF_SUCCESS;
17195
17196 case IEMMODE_32BIT:
17197 IEM_MC_BEGIN(2, 2);
17198 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
17199 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17200 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17201
17202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17203 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17204 IEM_MC_FETCH_EFLAGS(EFlags);
17205 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17206 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
17207 else
17208 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
17209
17210 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
17211 IEM_MC_COMMIT_EFLAGS(EFlags);
17212 IEM_MC_ADVANCE_RIP();
17213 IEM_MC_END();
17214 return VINF_SUCCESS;
17215
17216 case IEMMODE_64BIT:
17217 IEM_MC_BEGIN(2, 2);
17218 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
17219 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17220 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17221
17222 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17223 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17224 IEM_MC_FETCH_EFLAGS(EFlags);
17225 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17226 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
17227 else
17228 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
17229
17230 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
17231 IEM_MC_COMMIT_EFLAGS(EFlags);
17232 IEM_MC_ADVANCE_RIP();
17233 IEM_MC_END();
17234 return VINF_SUCCESS;
17235
17236 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17237 }
17238}
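
/*
 * Note: Both unary workers gate the LOCK prefix the same way: the memory
 * forms pick pImpl->pfnLockedUxx when IEM_OP_PRF_LOCK is set, while the
 * register forms reject the prefix during decoding.  A sketch of that
 * normal-vs-locked worker pair (all names invented for the example):
 *
 * @code
 *  #include <stdint.h>
 *
 *  typedef void (*PFNUNARYU8)(uint8_t *pu8Dst, uint32_t *pfEFlags);
 *  typedef struct UNARYIMPL
 *  {
 *      PFNUNARYU8 pfnNormalU8;
 *      PFNUNARYU8 pfnLockedU8;     // NULL when no locked variant exists
 *  } UNARYIMPL;
 *
 *  static void NegU8(uint8_t *pu8Dst, uint32_t *pfEFlags)
 *  {
 *      *pfEFlags = *pu8Dst != 0;   // only CF is modelled in this sketch
 *      *pu8Dst   = (uint8_t)(0 - *pu8Dst);
 *  }
 *
 *  static void CallWorker(UNARYIMPL const *pImpl, int fLockPrefix,
 *                         uint8_t *pu8Dst, uint32_t *pfEFlags)
 *  {
 *      if (!fLockPrefix)
 *          pImpl->pfnNormalU8(pu8Dst, pfEFlags);
 *      else
 *          pImpl->pfnLockedU8(pu8Dst, pfEFlags);
 *  }
 *
 *  int main(void)
 *  {
 *      UNARYIMPL const Impl = { NegU8, NegU8 }; // locked variant elided
 *      uint8_t  u8  = 1;
 *      uint32_t fFl = 0;
 *      CallWorker(&Impl, 0, &u8, &fFl);         // fLockPrefix=0
 *      return u8 == 0xff && fFl == 1 ? 0 : 1;   // neg 1 -> 0xff, CF set
 *  }
 * @endcode
 */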
17239
17240
17241/** Opcode 0xf6 /0. */
17242FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
17243{
17244 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
17245 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
17246
17247 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17248 {
17249 /* register access */
17250 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17252
17253 IEM_MC_BEGIN(3, 0);
17254 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17255 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
17256 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17257 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17258 IEM_MC_REF_EFLAGS(pEFlags);
17259 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
17260 IEM_MC_ADVANCE_RIP();
17261 IEM_MC_END();
17262 }
17263 else
17264 {
17265 /* memory access. */
17266 IEM_MC_BEGIN(3, 2);
17267 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17268 IEM_MC_ARG(uint8_t, u8Src, 1);
17269 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
17270 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17271
17272 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
17273 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17274 IEM_MC_ASSIGN(u8Src, u8Imm);
17275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17276 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17277 IEM_MC_FETCH_EFLAGS(EFlags);
17278 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
17279
17280 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
17281 IEM_MC_COMMIT_EFLAGS(EFlags);
17282 IEM_MC_ADVANCE_RIP();
17283 IEM_MC_END();
17284 }
17285 return VINF_SUCCESS;
17286}
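
/*
 * Note: In the memory form above the effective address is calculated with a
 * trailing-immediate hint of 1 (the last argument of
 * IEM_MC_CALC_RM_EFF_ADDR) because the Ib immediate has not been read yet:
 * a RIP-relative displacement is relative to the end of the whole
 * instruction, so the decoder must know how many immediate bytes follow.
 * Worked example for 'test byte [rip+0x10], 0x5a' (f6 05 10 00 00 00 5a)
 * fetched at 0x1000:
 *
 * @code
 *  #include <stdint.h>
 *  #include <stdio.h>
 *
 *  int main(void)
 *  {
 *      uint64_t const uRipStart = 0x1000;  // where the instruction starts
 *      unsigned const cbInstr   = 7;       // opcode+modrm+disp32+imm8
 *      int32_t  const i32Disp   = 0x10;
 *      // the displacement is added to the address of the next instruction:
 *      uint64_t const uEffAddr  = uRipStart + cbInstr + (int64_t)i32Disp;
 *      printf("%#llx\n", (unsigned long long)uEffAddr);  // 0x1017
 *      return 0;
 *  }
 * @endcode
 */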
17287
17288
17289/** Opcode 0xf7 /0. */
17290FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
17291{
17292 IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
17293 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
17294
17295 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17296 {
17297 /* register access */
17298 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17299 switch (pVCpu->iem.s.enmEffOpSize)
17300 {
17301 case IEMMODE_16BIT:
17302 {
17303 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
17304 IEM_MC_BEGIN(3, 0);
17305 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
17306 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
17307 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17308 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17309 IEM_MC_REF_EFLAGS(pEFlags);
17310 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
17311 IEM_MC_ADVANCE_RIP();
17312 IEM_MC_END();
17313 return VINF_SUCCESS;
17314 }
17315
17316 case IEMMODE_32BIT:
17317 {
17318 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
17319 IEM_MC_BEGIN(3, 0);
17320 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
17321 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
17322 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17323 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17324 IEM_MC_REF_EFLAGS(pEFlags);
17325 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
17326 /* No clearing the high dword here - test doesn't write back the result. */
17327 IEM_MC_ADVANCE_RIP();
17328 IEM_MC_END();
17329 return VINF_SUCCESS;
17330 }
17331
17332 case IEMMODE_64BIT:
17333 {
17334 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
17335 IEM_MC_BEGIN(3, 0);
17336 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
17337 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
17338 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17339 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17340 IEM_MC_REF_EFLAGS(pEFlags);
17341 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
17342 IEM_MC_ADVANCE_RIP();
17343 IEM_MC_END();
17344 return VINF_SUCCESS;
17345 }
17346
17347 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17348 }
17349 }
17350 else
17351 {
17352 /* memory access. */
17353 switch (pVCpu->iem.s.enmEffOpSize)
17354 {
17355 case IEMMODE_16BIT:
17356 {
17357 IEM_MC_BEGIN(3, 2);
17358 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
17359 IEM_MC_ARG(uint16_t, u16Src, 1);
17360 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
17361 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17362
17363 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
17364 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
17365 IEM_MC_ASSIGN(u16Src, u16Imm);
17366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17367 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17368 IEM_MC_FETCH_EFLAGS(EFlags);
17369 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
17370
17371 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
17372 IEM_MC_COMMIT_EFLAGS(EFlags);
17373 IEM_MC_ADVANCE_RIP();
17374 IEM_MC_END();
17375 return VINF_SUCCESS;
17376 }
17377
17378 case IEMMODE_32BIT:
17379 {
17380 IEM_MC_BEGIN(3, 2);
17381 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
17382 IEM_MC_ARG(uint32_t, u32Src, 1);
17383 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
17384 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17385
17386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
17387 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
17388 IEM_MC_ASSIGN(u32Src, u32Imm);
17389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17390 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17391 IEM_MC_FETCH_EFLAGS(EFlags);
17392 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
17393
17394 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
17395 IEM_MC_COMMIT_EFLAGS(EFlags);
17396 IEM_MC_ADVANCE_RIP();
17397 IEM_MC_END();
17398 return VINF_SUCCESS;
17399 }
17400
17401 case IEMMODE_64BIT:
17402 {
17403 IEM_MC_BEGIN(3, 2);
17404 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
17405 IEM_MC_ARG(uint64_t, u64Src, 1);
17406 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
17407 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17408
17409 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
17410 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
17411 IEM_MC_ASSIGN(u64Src, u64Imm);
17412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17413 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17414 IEM_MC_FETCH_EFLAGS(EFlags);
17415 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
17416
17417 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
17418 IEM_MC_COMMIT_EFLAGS(EFlags);
17419 IEM_MC_ADVANCE_RIP();
17420 IEM_MC_END();
17421 return VINF_SUCCESS;
17422 }
17423
17424 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17425 }
17426 }
17427}
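
/*
 * Note: There is no 64-bit immediate form of 'test Ev,Iv': in 64-bit mode
 * the immediate stays 32 bits and is sign-extended, which is what
 * IEM_OPCODE_GET_NEXT_S32_SX_U64 does above.  Minimal illustration of the
 * extension rule:
 *
 * @code
 *  #include <stdint.h>
 *  #include <stdio.h>
 *
 *  int main(void)
 *  {
 *      uint32_t const u32Imm = UINT32_C(0x80000000);
 *      uint64_t const u64Imm = (uint64_t)(int64_t)(int32_t)u32Imm;
 *      printf("%#llx\n", (unsigned long long)u64Imm); // 0xffffffff80000000
 *      return 0;
 *  }
 * @endcode
 */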
17428
17429
17430/** Opcode 0xf6 /4, /5, /6 and /7. */
17431FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
17432{
17433 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17434 {
17435 /* register access */
17436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17437 IEM_MC_BEGIN(3, 1);
17438 IEM_MC_ARG(uint16_t *, pu16AX, 0);
17439 IEM_MC_ARG(uint8_t, u8Value, 1);
17440 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17441 IEM_MC_LOCAL(int32_t, rc);
17442
17443 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17444 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
17445 IEM_MC_REF_EFLAGS(pEFlags);
17446 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
17447 IEM_MC_IF_LOCAL_IS_Z(rc) {
17448 IEM_MC_ADVANCE_RIP();
17449 } IEM_MC_ELSE() {
17450 IEM_MC_RAISE_DIVIDE_ERROR();
17451 } IEM_MC_ENDIF();
17452
17453 IEM_MC_END();
17454 }
17455 else
17456 {
17457 /* memory access. */
17458 IEM_MC_BEGIN(3, 2);
17459 IEM_MC_ARG(uint16_t *, pu16AX, 0);
17460 IEM_MC_ARG(uint8_t, u8Value, 1);
17461 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17462 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17463 IEM_MC_LOCAL(int32_t, rc);
17464
17465 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17466 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17467 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17468 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
17469 IEM_MC_REF_EFLAGS(pEFlags);
17470 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
17471 IEM_MC_IF_LOCAL_IS_Z(rc) {
17472 IEM_MC_ADVANCE_RIP();
17473 } IEM_MC_ELSE() {
17474 IEM_MC_RAISE_DIVIDE_ERROR();
17475 } IEM_MC_ENDIF();
17476
17477 IEM_MC_END();
17478 }
17479 return VINF_SUCCESS;
17480}
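
/*
 * Note: Unlike the plain arithmetic workers, the mul/div helpers return a
 * status code so the decoder can raise \#DE via IEM_MC_RAISE_DIVIDE_ERROR
 * instead of committing any state.  A sketch of the 8-bit unsigned divide
 * rule such a helper has to implement - divide error on a zero divisor or
 * when the quotient does not fit in AL (helper name invented):
 *
 * @code
 *  #include <stdint.h>
 *
 *  // returns 0 on success, -1 when a divide error must be raised
 *  static int DivU8(uint16_t *pu16AX, uint8_t u8Divisor)
 *  {
 *      if (!u8Divisor)
 *          return -1;                              // divide by zero
 *      uint16_t const uQuotient = *pu16AX / u8Divisor;
 *      if (uQuotient > 0xff)
 *          return -1;                              // AL overflow, e.g. 0x200 / 1
 *      *pu16AX = (uint16_t)(((*pu16AX % u8Divisor) << 8) | uQuotient);
 *      return 0;
 *  }
 *
 *  int main(void)
 *  {
 *      uint16_t u16AX = 0x0101;                    // 257
 *      return DivU8(&u16AX, 2) == 0 && u16AX == 0x0180 ? 0 : 1; // AH=1 AL=0x80
 *  }
 * @endcode
 */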
17481
17482
17483/** Opcode 0xf7 /4, /5, /6 and /7. */
17484FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
17485{
17486 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
17487
17488 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17489 {
17490 /* register access */
17492 switch (pVCpu->iem.s.enmEffOpSize)
17493 {
17494 case IEMMODE_16BIT:
17495 {
17496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17497 IEM_MC_BEGIN(4, 1);
17498 IEM_MC_ARG(uint16_t *, pu16AX, 0);
17499 IEM_MC_ARG(uint16_t *, pu16DX, 1);
17500 IEM_MC_ARG(uint16_t, u16Value, 2);
17501 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17502 IEM_MC_LOCAL(int32_t, rc);
17503
17504 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17505 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
17506 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
17507 IEM_MC_REF_EFLAGS(pEFlags);
17508 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
17509 IEM_MC_IF_LOCAL_IS_Z(rc) {
17510 IEM_MC_ADVANCE_RIP();
17511 } IEM_MC_ELSE() {
17512 IEM_MC_RAISE_DIVIDE_ERROR();
17513 } IEM_MC_ENDIF();
17514
17515 IEM_MC_END();
17516 return VINF_SUCCESS;
17517 }
17518
17519 case IEMMODE_32BIT:
17520 {
17521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17522 IEM_MC_BEGIN(4, 1);
17523 IEM_MC_ARG(uint32_t *, pu32AX, 0);
17524 IEM_MC_ARG(uint32_t *, pu32DX, 1);
17525 IEM_MC_ARG(uint32_t, u32Value, 2);
17526 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17527 IEM_MC_LOCAL(int32_t, rc);
17528
17529 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17530 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
17531 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
17532 IEM_MC_REF_EFLAGS(pEFlags);
17533 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
17534 IEM_MC_IF_LOCAL_IS_Z(rc) {
17535 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
17536 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
17537 IEM_MC_ADVANCE_RIP();
17538 } IEM_MC_ELSE() {
17539 IEM_MC_RAISE_DIVIDE_ERROR();
17540 } IEM_MC_ENDIF();
17541
17542 IEM_MC_END();
17543 return VINF_SUCCESS;
17544 }
17545
17546 case IEMMODE_64BIT:
17547 {
17548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17549 IEM_MC_BEGIN(4, 1);
17550 IEM_MC_ARG(uint64_t *, pu64AX, 0);
17551 IEM_MC_ARG(uint64_t *, pu64DX, 1);
17552 IEM_MC_ARG(uint64_t, u64Value, 2);
17553 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17554 IEM_MC_LOCAL(int32_t, rc);
17555
17556 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17557 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
17558 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
17559 IEM_MC_REF_EFLAGS(pEFlags);
17560 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
17561 IEM_MC_IF_LOCAL_IS_Z(rc) {
17562 IEM_MC_ADVANCE_RIP();
17563 } IEM_MC_ELSE() {
17564 IEM_MC_RAISE_DIVIDE_ERROR();
17565 } IEM_MC_ENDIF();
17566
17567 IEM_MC_END();
17568 return VINF_SUCCESS;
17569 }
17570
17571 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17572 }
17573 }
17574 else
17575 {
17576 /* memory access. */
17577 switch (pVCpu->iem.s.enmEffOpSize)
17578 {
17579 case IEMMODE_16BIT:
17580 {
17581 IEM_MC_BEGIN(4, 2);
17582 IEM_MC_ARG(uint16_t *, pu16AX, 0);
17583 IEM_MC_ARG(uint16_t *, pu16DX, 1);
17584 IEM_MC_ARG(uint16_t, u16Value, 2);
17585 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17586 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17587 IEM_MC_LOCAL(int32_t, rc);
17588
17589 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17591 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17592 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
17593 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
17594 IEM_MC_REF_EFLAGS(pEFlags);
17595 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
17596 IEM_MC_IF_LOCAL_IS_Z(rc) {
17597 IEM_MC_ADVANCE_RIP();
17598 } IEM_MC_ELSE() {
17599 IEM_MC_RAISE_DIVIDE_ERROR();
17600 } IEM_MC_ENDIF();
17601
17602 IEM_MC_END();
17603 return VINF_SUCCESS;
17604 }
17605
17606 case IEMMODE_32BIT:
17607 {
17608 IEM_MC_BEGIN(4, 2);
17609 IEM_MC_ARG(uint32_t *, pu32AX, 0);
17610 IEM_MC_ARG(uint32_t *, pu32DX, 1);
17611 IEM_MC_ARG(uint32_t, u32Value, 2);
17612 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17613 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17614 IEM_MC_LOCAL(int32_t, rc);
17615
17616 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17618 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17619 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
17620 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
17621 IEM_MC_REF_EFLAGS(pEFlags);
17622 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
17623 IEM_MC_IF_LOCAL_IS_Z(rc) {
17624 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
17625 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
17626 IEM_MC_ADVANCE_RIP();
17627 } IEM_MC_ELSE() {
17628 IEM_MC_RAISE_DIVIDE_ERROR();
17629 } IEM_MC_ENDIF();
17630
17631 IEM_MC_END();
17632 return VINF_SUCCESS;
17633 }
17634
17635 case IEMMODE_64BIT:
17636 {
17637 IEM_MC_BEGIN(4, 2);
17638 IEM_MC_ARG(uint64_t *, pu64AX, 0);
17639 IEM_MC_ARG(uint64_t *, pu64DX, 1);
17640 IEM_MC_ARG(uint64_t, u64Value, 2);
17641 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17642 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17643 IEM_MC_LOCAL(int32_t, rc);
17644
17645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17647 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17648 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
17649 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
17650 IEM_MC_REF_EFLAGS(pEFlags);
17651 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
17652 IEM_MC_IF_LOCAL_IS_Z(rc) {
17653 IEM_MC_ADVANCE_RIP();
17654 } IEM_MC_ELSE() {
17655 IEM_MC_RAISE_DIVIDE_ERROR();
17656 } IEM_MC_ENDIF();
17657
17658 IEM_MC_END();
17659 return VINF_SUCCESS;
17660 }
17661
17662 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17663 }
17664 }
17665}
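
/*
 * Note: Only the 32-bit paths above need the explicit
 * IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF calls: the worker writes through a
 * uint32_t reference into the low half of the 64-bit register, so the
 * architectural rule that a 32-bit GPR write zero-extends to 64 bits has to
 * be applied by hand - and only on the success path, since a faulting DIV
 * must leave the registers untouched.  The rule itself, illustrated:
 *
 * @code
 *  #include <stdint.h>
 *
 *  int main(void)
 *  {
 *      uint64_t uRax = UINT64_C(0xdeadbeef00000007);
 *      uint32_t const uNewEax = 42;
 *      uRax = uNewEax;         // a 32-bit write clears bits 63:32 as well
 *      return uRax == 42 ? 0 : 1;
 *  }
 * @endcode
 */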
17666
17667/** Opcode 0xf6. */
17668FNIEMOP_DEF(iemOp_Grp3_Eb)
17669{
17670 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17671 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17672 {
17673 case 0:
17674 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
17675 case 1:
17676/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
17677 return IEMOP_RAISE_INVALID_OPCODE();
17678 case 2:
17679 IEMOP_MNEMONIC(not_Eb, "not Eb");
17680 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
17681 case 3:
17682 IEMOP_MNEMONIC(neg_Eb, "neg Eb");
17683 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
17684 case 4:
17685 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
17686 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
17687 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
17688 case 5:
17689 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
17690 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
17691 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
17692 case 6:
17693 IEMOP_MNEMONIC(div_Eb, "div Eb");
17694 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
17695 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
17696 case 7:
17697 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
17698 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
17699 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
17700 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17701 }
17702}
17703
17704
17705/** Opcode 0xf7. */
17706FNIEMOP_DEF(iemOp_Grp3_Ev)
17707{
17708 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17709 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17710 {
17711 case 0:
17712 return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
17713 case 1:
17714/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
17715 return IEMOP_RAISE_INVALID_OPCODE();
17716 case 2:
17717 IEMOP_MNEMONIC(not_Ev, "not Ev");
17718 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
17719 case 3:
17720 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
17721 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
17722 case 4:
17723 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
17724 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
17725 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
17726 case 5:
17727 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
17728 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
17729 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
17730 case 6:
17731 IEMOP_MNEMONIC(div_Ev, "div Ev");
17732 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
17733 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
17734 case 7:
17735 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
17736 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
17737 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
17738 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17739 }
17740}
17741
17742
17743/** Opcode 0xf8. */
17744FNIEMOP_DEF(iemOp_clc)
17745{
17746 IEMOP_MNEMONIC(clc, "clc");
17747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17748 IEM_MC_BEGIN(0, 0);
17749 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
17750 IEM_MC_ADVANCE_RIP();
17751 IEM_MC_END();
17752 return VINF_SUCCESS;
17753}
17754
17755
17756/** Opcode 0xf9. */
17757FNIEMOP_DEF(iemOp_stc)
17758{
17759 IEMOP_MNEMONIC(stc, "stc");
17760 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17761 IEM_MC_BEGIN(0, 0);
17762 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
17763 IEM_MC_ADVANCE_RIP();
17764 IEM_MC_END();
17765 return VINF_SUCCESS;
17766}
17767
17768
17769/** Opcode 0xfa. */
17770FNIEMOP_DEF(iemOp_cli)
17771{
17772 IEMOP_MNEMONIC(cli, "cli");
17773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17774 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
17775}
17776
17777
/** Opcode 0xfb. */
17778FNIEMOP_DEF(iemOp_sti)
17779{
17780 IEMOP_MNEMONIC(sti, "sti");
17781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17782 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
17783}
17784
17785
17786/** Opcode 0xfc. */
17787FNIEMOP_DEF(iemOp_cld)
17788{
17789 IEMOP_MNEMONIC(cld, "cld");
17790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17791 IEM_MC_BEGIN(0, 0);
17792 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
17793 IEM_MC_ADVANCE_RIP();
17794 IEM_MC_END();
17795 return VINF_SUCCESS;
17796}
17797
17798
17799/** Opcode 0xfd. */
17800FNIEMOP_DEF(iemOp_std)
17801{
17802 IEMOP_MNEMONIC(std, "std");
17803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17804 IEM_MC_BEGIN(0, 0);
17805 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
17806 IEM_MC_ADVANCE_RIP();
17807 IEM_MC_END();
17808 return VINF_SUCCESS;
17809}
17810
17811
17812/** Opcode 0xfe. */
17813FNIEMOP_DEF(iemOp_Grp4)
17814{
17815 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17816 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17817 {
17818 case 0:
17819 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
17820 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
17821 case 1:
17822 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
17823 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
17824 default:
17825 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
17826 return IEMOP_RAISE_INVALID_OPCODE();
17827 }
17828}
17829
17830
17831/**
17832 * Opcode 0xff /2.
17833 * @param bRm The RM byte.
17834 */
17835FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
17836{
17837 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
17838 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17839
17840 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17841 {
17842 /* The new RIP is taken from a register. */
17843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17844 switch (pVCpu->iem.s.enmEffOpSize)
17845 {
17846 case IEMMODE_16BIT:
17847 IEM_MC_BEGIN(1, 0);
17848 IEM_MC_ARG(uint16_t, u16Target, 0);
17849 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17850 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
17851 IEM_MC_END()
17852 return VINF_SUCCESS;
17853
17854 case IEMMODE_32BIT:
17855 IEM_MC_BEGIN(1, 0);
17856 IEM_MC_ARG(uint32_t, u32Target, 0);
17857 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17858 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
17859 IEM_MC_END()
17860 return VINF_SUCCESS;
17861
17862 case IEMMODE_64BIT:
17863 IEM_MC_BEGIN(1, 0);
17864 IEM_MC_ARG(uint64_t, u64Target, 0);
17865 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17866 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
17867 IEM_MC_END()
17868 return VINF_SUCCESS;
17869
17870 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17871 }
17872 }
17873 else
17874 {
17875        /* The new RIP is taken from a memory location. */
17876 switch (pVCpu->iem.s.enmEffOpSize)
17877 {
17878 case IEMMODE_16BIT:
17879 IEM_MC_BEGIN(1, 1);
17880 IEM_MC_ARG(uint16_t, u16Target, 0);
17881 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17882 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17884 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17885 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
17886 IEM_MC_END()
17887 return VINF_SUCCESS;
17888
17889 case IEMMODE_32BIT:
17890 IEM_MC_BEGIN(1, 1);
17891 IEM_MC_ARG(uint32_t, u32Target, 0);
17892 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17893 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17895 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17896 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
17897 IEM_MC_END()
17898 return VINF_SUCCESS;
17899
17900 case IEMMODE_64BIT:
17901 IEM_MC_BEGIN(1, 1);
17902 IEM_MC_ARG(uint64_t, u64Target, 0);
17903 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17904 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17906 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17907 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
17908 IEM_MC_END()
17909 return VINF_SUCCESS;
17910
17911 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17912 }
17913 }
17914}
17915
17916typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
17917
17918FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
17919{
17920    /* Registers? How?? A far pointer cannot come from a register. */
17921 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
17922 { /* likely */ }
17923 else
17924 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
17925
17926 /* Far pointer loaded from memory. */
17927 switch (pVCpu->iem.s.enmEffOpSize)
17928 {
17929 case IEMMODE_16BIT:
17930 IEM_MC_BEGIN(3, 1);
17931 IEM_MC_ARG(uint16_t, u16Sel, 0);
17932 IEM_MC_ARG(uint16_t, offSeg, 1);
17933 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17934 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17935 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17937 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17938 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
17939 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17940 IEM_MC_END();
17941 return VINF_SUCCESS;
17942
17943 case IEMMODE_64BIT:
17944 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
17945 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
17946 * and call far qword [rsp] encodings. */
17947 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
17948 {
17949 IEM_MC_BEGIN(3, 1);
17950 IEM_MC_ARG(uint16_t, u16Sel, 0);
17951 IEM_MC_ARG(uint64_t, offSeg, 1);
17952                IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2);
17953 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17954 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17956 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17957 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
17958 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17959 IEM_MC_END();
17960 return VINF_SUCCESS;
17961 }
17962 /* AMD falls thru. */
17963
17964 case IEMMODE_32BIT:
17965 IEM_MC_BEGIN(3, 1);
17966 IEM_MC_ARG(uint16_t, u16Sel, 0);
17967 IEM_MC_ARG(uint32_t, offSeg, 1);
17968 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
17969 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17970 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17972 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17973 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
17974 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17975 IEM_MC_END();
17976 return VINF_SUCCESS;
17977
17978 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17979 }
17980}
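
/*
 * Note: The two fetches above encode the m16:16/m16:32/m16:64 far pointer
 * layout: the offset comes first in memory and the 16-bit selector follows
 * it at +2, +4 or +8 depending on the operand size.  Sketch of pulling a
 * 16:32 far pointer apart (little-endian byte order assumed):
 *
 * @code
 *  #include <stdint.h>
 *  #include <string.h>
 *  #include <stdio.h>
 *
 *  int main(void)
 *  {
 *      // 0008:00401000 as the 6 bytes sit in guest memory:
 *      uint8_t const abMem[6] = { 0x00, 0x10, 0x40, 0x00, 0x08, 0x00 };
 *      uint32_t offSeg;
 *      uint16_t uSel;
 *      memcpy(&offSeg, &abMem[0], sizeof(offSeg)); // offset at +0
 *      memcpy(&uSel,   &abMem[4], sizeof(uSel));   // selector at +4
 *      printf("%04x:%08x\n", uSel, offSeg);        // 0008:00401000
 *      return 0;
 *  }
 * @endcode
 */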
17981
17982
17983/**
17984 * Opcode 0xff /3.
17985 * @param bRm The RM byte.
17986 */
17987FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
17988{
17989 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
17990 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
17991}
17992
17993
17994/**
17995 * Opcode 0xff /4.
17996 * @param bRm The RM byte.
17997 */
17998FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
17999{
18000 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
18001 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
18002
18003 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18004 {
18005 /* The new RIP is taken from a register. */
18006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18007 switch (pVCpu->iem.s.enmEffOpSize)
18008 {
18009 case IEMMODE_16BIT:
18010 IEM_MC_BEGIN(0, 1);
18011 IEM_MC_LOCAL(uint16_t, u16Target);
18012 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18013 IEM_MC_SET_RIP_U16(u16Target);
18014 IEM_MC_END()
18015 return VINF_SUCCESS;
18016
18017 case IEMMODE_32BIT:
18018 IEM_MC_BEGIN(0, 1);
18019 IEM_MC_LOCAL(uint32_t, u32Target);
18020 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18021 IEM_MC_SET_RIP_U32(u32Target);
18022 IEM_MC_END()
18023 return VINF_SUCCESS;
18024
18025 case IEMMODE_64BIT:
18026 IEM_MC_BEGIN(0, 1);
18027 IEM_MC_LOCAL(uint64_t, u64Target);
18028 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18029 IEM_MC_SET_RIP_U64(u64Target);
18030 IEM_MC_END()
18031 return VINF_SUCCESS;
18032
18033 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18034 }
18035 }
18036 else
18037 {
18038 /* The new RIP is taken from a memory location. */
18039 switch (pVCpu->iem.s.enmEffOpSize)
18040 {
18041 case IEMMODE_16BIT:
18042 IEM_MC_BEGIN(0, 2);
18043 IEM_MC_LOCAL(uint16_t, u16Target);
18044 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18045 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18047 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18048 IEM_MC_SET_RIP_U16(u16Target);
18049 IEM_MC_END()
18050 return VINF_SUCCESS;
18051
18052 case IEMMODE_32BIT:
18053 IEM_MC_BEGIN(0, 2);
18054 IEM_MC_LOCAL(uint32_t, u32Target);
18055 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18056 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18058 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18059 IEM_MC_SET_RIP_U32(u32Target);
18060 IEM_MC_END()
18061 return VINF_SUCCESS;
18062
18063 case IEMMODE_64BIT:
18064 IEM_MC_BEGIN(0, 2);
18065 IEM_MC_LOCAL(uint64_t, u64Target);
18066 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18067 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18069 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18070 IEM_MC_SET_RIP_U64(u64Target);
18071 IEM_MC_END()
18072 return VINF_SUCCESS;
18073
18074 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18075 }
18076 }
18077}
18078
18079
18080/**
18081 * Opcode 0xff /5.
18082 * @param bRm The RM byte.
18083 */
18084FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
18085{
18086 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
18087 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
18088}
18089
18090
18091/**
18092 * Opcode 0xff /6.
18093 * @param bRm The RM byte.
18094 */
18095FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
18096{
18097 IEMOP_MNEMONIC(push_Ev, "push Ev");
18098
18099 /* Registers are handled by a common worker. */
18100 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18101 return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18102
18103 /* Memory we do here. */
18104 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
18105 switch (pVCpu->iem.s.enmEffOpSize)
18106 {
18107 case IEMMODE_16BIT:
18108 IEM_MC_BEGIN(0, 2);
18109 IEM_MC_LOCAL(uint16_t, u16Src);
18110 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18111 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18113 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18114 IEM_MC_PUSH_U16(u16Src);
18115 IEM_MC_ADVANCE_RIP();
18116 IEM_MC_END();
18117 return VINF_SUCCESS;
18118
18119 case IEMMODE_32BIT:
18120 IEM_MC_BEGIN(0, 2);
18121 IEM_MC_LOCAL(uint32_t, u32Src);
18122 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18123 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18125 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18126 IEM_MC_PUSH_U32(u32Src);
18127 IEM_MC_ADVANCE_RIP();
18128 IEM_MC_END();
18129 return VINF_SUCCESS;
18130
18131 case IEMMODE_64BIT:
18132 IEM_MC_BEGIN(0, 2);
18133 IEM_MC_LOCAL(uint64_t, u64Src);
18134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18137 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18138 IEM_MC_PUSH_U64(u64Src);
18139 IEM_MC_ADVANCE_RIP();
18140 IEM_MC_END();
18141 return VINF_SUCCESS;
18142
18143 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18144 }
18145}
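
/*
 * Note: IEMOP_HLP_DEFAULT_64BIT_OP_SIZE above reflects that pushes (like
 * near branches) default to a 64-bit operand size in long mode: a 0x66
 * prefix still selects 16-bit, but a 32-bit push is not encodable there,
 * so the IEMMODE_32BIT case only serves 16/32-bit modes.  The selection
 * rule, sketched (names invented; non-long mode assumes a 32-bit CS):
 *
 * @code
 *  #include <stdio.h>
 *
 *  typedef enum { MODE16, MODE32, MODE64 } MYMODE;
 *
 *  static MYMODE EffPushOpSize(int fLongMode, int fOpSizePrefix)
 *  {
 *      if (fLongMode)
 *          return fOpSizePrefix ? MODE16 : MODE64; // REX.W is redundant here
 *      return fOpSizePrefix ? MODE16 : MODE32;
 *  }
 *
 *  int main(void)
 *  {
 *      printf("%d\n", EffPushOpSize(1, 0));        // 2 = MODE64
 *      return 0;
 *  }
 * @endcode
 */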
18146
18147
18148/** Opcode 0xff. */
18149FNIEMOP_DEF(iemOp_Grp5)
18150{
18151 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
18152 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
18153 {
18154 case 0:
18155 IEMOP_MNEMONIC(inc_Ev, "inc Ev");
18156 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
18157 case 1:
18158 IEMOP_MNEMONIC(dec_Ev, "dec Ev");
18159 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
18160 case 2:
18161 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
18162 case 3:
18163 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
18164 case 4:
18165 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
18166 case 5:
18167 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
18168 case 6:
18169 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
18170 case 7:
18171 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
18172 return IEMOP_RAISE_INVALID_OPCODE();
18173 }
18174 AssertFailedReturn(VERR_IEM_IPE_3);
18175}
18176
18177
18178
18179const PFNIEMOP g_apfnOneByteMap[256] =
18180{
18181 /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
18182 /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
18183 /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
18184 /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
18185 /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
18186 /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
18187 /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
18188 /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
18189 /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
18190 /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
18191 /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
18192 /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
18193 /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
18194 /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
18195 /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
18196 /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
18197 /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
18198 /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
18199 /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
18200 /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
18201 /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
18202 /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
18203 /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
18204 /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
18205 /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
18206 /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
18207 /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
18208 /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
18209 /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
18210 /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
18211 /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
18212 /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
18213 /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
18214 /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
18215 /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
18216 /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
18217 /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
18218 /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
18219 /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
18220 /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
18221 /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
18222 /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
18223 /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
18224 /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
18225 /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
18226 /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
18227 /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
18228 /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
18229 /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
18230 /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
18231 /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
18232 /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
18233 /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
18234 /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
18235 /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
18236 /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
18237 /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
18238 /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
18239 /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
18240 /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
18241 /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
18242 /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
18243 /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
18244 /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
18245};
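
/*
 * Note: This 256-entry table is the primary dispatch mechanism - one
 * function pointer per opcode byte, with the prefix bytes (0x66, 0xf0,
 * 0xf2, 0xf3, segment overrides, ...) being ordinary entries that recurse
 * back into the map after updating the decoder state.  A self-contained
 * sketch of the same table-driven shape:
 *
 * @code
 *  #include <stdint.h>
 *  #include <stdio.h>
 *
 *  typedef int (*PFNOP)(void);
 *  static int OpNop(void) { printf("nop\n"); return 0; }
 *  static int OpUd(void)  { printf("invalid opcode\n"); return -1; }
 *
 *  int main(void)
 *  {
 *      PFNOP apfnMap[256];
 *      for (unsigned i = 0; i < 256; i++)
 *          apfnMap[i] = OpUd;          // default: undefined opcode
 *      apfnMap[0x90] = OpNop;
 *      uint8_t const bOpcode = 0x90;
 *      return apfnMap[bOpcode]();      // indexed dispatch, no switch needed
 *  }
 * @endcode
 */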
18246
18247
18248/** @} */
18249
18250#ifdef _MSC_VER
18251# pragma warning(pop)
18252#endif