VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 65504

Last change on this file since 65504 was 65501, checked in by vboxsync, 8 years ago

IEM: some cmpxchg16b notes.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 632.5 KB
Line 
/* $Id: IEMAllInstructions.cpp.h 65501 2017-01-28 22:36:58Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 */

/*
 * Copyright (C) 2011-2016 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/*********************************************************************************************************************************
*   Global Variables                                                                                                             *
*********************************************************************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24#ifdef _MSC_VER
25# pragma warning(push)
26# pragma warning(disable: 4702) /* Unreachable code like return in iemOp_Grp6_lldt. */
27#endif
28
29
/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination.
 *
 * Decodes the ModR/M byte itself.  The r/m operand is the destination and the
 * reg operand is the source.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* Source = MODRM.reg (REX.R extended), destination = MODRM.rm (REX.B extended). */
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* A NULL U8 locked worker identifies CMP/TEST: the destination is only
           read and the LOCK prefix must be rejected. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        if (!pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* Dispatch to the locked worker when a LOCK prefix is present. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
92
93
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Decodes the ModR/M byte itself and switches on the effective operand size.
 * The r/m operand is the destination and the reg operand is the source.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* TEST doesn't write its destination, so the 64-bit register
                   high half must not be cleared for it. */
                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            /* NOTE(review): no IEM_NOT_REACHED_DEFAULT_CASE_RET() here, unlike
               iemOpHlpBinaryOperator_rAX_Iz -- presumably fine since all three
               modes are handled; confirm. */
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* The U8 locked worker is used as a proxy for all sizes: it is only
           NULL for CMP/TEST, which read the destination and reject LOCK. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
246
247
/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination.
 *
 * Decodes the ModR/M byte itself.  The reg operand is the destination and the
 * r/m operand the source; since the destination is always a register, the
 * memory path is a plain read and no LOCK handling is required.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* Source = MODRM.rm, destination = MODRM.reg (reverse of the rm_r8 worker). */
        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
300
301
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination.
 *
 * Decodes the ModR/M byte itself and switches on the effective operand size.
 * The reg operand is the destination and the r/m operand the source; the
 * memory path is read-only, so no LOCK handling is required.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit writes zero the high half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit writes zero the high half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
432
433
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * The destination is always AL; the immediate byte is fetched here.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1);
    IEM_MC_ARG(uint32_t *,      pEFlags,            2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
458
459
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * The immediate is a word or dword (Iz); in 64-bit operand size the dword
 * immediate is sign-extended to 64 bits.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            /* TEST doesn't write EAX, so only clear the high half for writers. */
            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Iz in 64-bit mode: imm32 sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
532
533
/** Opcodes 0xf1, 0xd6.
 *  Generic invalid-opcode handler: raises \#UD without consuming operands. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC(Invalid, "Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
540
541
/** Invalid with RM byte.
 *  Like iemOp_Invalid, but for table entries whose dispatcher has already
 *  fetched the ModR/M byte (which is ignored here). */
FNIEMOPRM_DEF(iemOp_InvalidWithRM)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(InvalidWithRm, "InvalidWithRM");
    return IEMOP_RAISE_INVALID_OPCODE();
}
549
550
551
552/** @name ..... opcodes.
553 *
554 * @{
555 */
556
557/** @} */
558
559
560/** @name Two byte opcodes (first byte 0x0f).
561 *
562 * @{
563 */
564
/** Opcode 0x0f 0x00 /0.
 *  SLDT - store the LDT selector to a register (operand-size width) or to a
 *  16-bit memory location. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
621
622
/** Opcode 0x0f 0x00 /1.
 *  STR - store the task register selector to a register (operand-size width)
 *  or to a 16-bit memory location. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
679
680
/** Opcode 0x0f 0x00 /2.
 *  LLDT - load the LDT register from a 16-bit register or memory operand.
 *  The heavy lifting (privilege and descriptor checks) is done in
 *  iemCImpl_lldt. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        /* CPL check before the memory fetch. */
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
711
712
/** Opcode 0x0f 0x00 /3.
 *  LTR - load the task register from a 16-bit register or memory operand.
 *  Privilege and descriptor checks are done in iemCImpl_ltr. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* CPL check before the memory fetch. */
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
743
744
745/** Opcode 0x0f 0x00 /3. */
746FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
747{
748 IEMOP_HLP_MIN_286();
749 IEMOP_HLP_NO_REAL_OR_V86_MODE();
750
751 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
752 {
753 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
754 IEM_MC_BEGIN(2, 0);
755 IEM_MC_ARG(uint16_t, u16Sel, 0);
756 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
757 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
758 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
759 IEM_MC_END();
760 }
761 else
762 {
763 IEM_MC_BEGIN(2, 1);
764 IEM_MC_ARG(uint16_t, u16Sel, 0);
765 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
766 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
767 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
768 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
769 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
770 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
771 IEM_MC_END();
772 }
773 return VINF_SUCCESS;
774}
775
776
/** Opcode 0x0f 0x00 /4.
 *  VERR - verify a segment for reading; implemented via the common VerX worker. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}
784
785
/** Opcode 0x0f 0x00 /5.
 *  VERW - verify a segment for writing; implemented via the common VerX worker. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}
793
794
/**
 * Group 6 jump table, indexed by the MODRM.reg field.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,        /* /0 */
    iemOp_Grp6_str,         /* /1 */
    iemOp_Grp6_lldt,        /* /2 */
    iemOp_Grp6_ltr,         /* /3 */
    iemOp_Grp6_verr,        /* /4 */
    iemOp_Grp6_verw,        /* /5 */
    iemOp_InvalidWithRM,    /* /6 */
    iemOp_InvalidWithRM     /* /7 */
};
809
/** Opcode 0x0f 0x00.
 *  Group 6 dispatcher: fetches the ModR/M byte and forwards to the handler
 *  selected by MODRM.reg. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
816
817
/** Opcode 0x0f 0x01 /0 (memory form).
 *  SGDT - store the GDTR to memory; the store itself is done in iemCImpl_sgdt. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
834
835
/** Opcode 0x0f 0x01 /0 (modrm 0xc1 - VMCALL).
 *  Not implemented: raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
842
843
/** Opcode 0x0f 0x01 /0 (modrm 0xc2 - VMLAUNCH).
 *  Not implemented: raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
850
851
/** Opcode 0x0f 0x01 /0 (modrm 0xc3 - VMRESUME).
 *  Not implemented: raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
858
859
/** Opcode 0x0f 0x01 /0 (modrm 0xc4 - VMXOFF).
 *  Not implemented: raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
866
867
/** Opcode 0x0f 0x01 /1 (memory form).
 *  SIDT - store the IDTR to memory; the store itself is done in iemCImpl_sidt. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
884
885
/** Opcode 0x0f 0x01 /1 (modrm 0xc8 - MONITOR).
 *  Defers to iemCImpl_monitor, passing the effective segment for the
 *  EAX-based address. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}
893
894
/** Opcode 0x0f 0x01 /1 (modrm 0xc9 - MWAIT).
 *  Defers entirely to iemCImpl_mwait. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
902
903
/** Opcode 0x0f 0x01 /2 (memory form).
 *  LGDT - load the GDTR from memory; the effective operand size is forwarded
 *  to iemCImpl_lgdt which does the actual load and checks. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
920
921
/** Opcode 0x0f 0x01 0xd0 (XGETBV).
 *  Raises \#UD unless the guest CPU reports XSAVE/XRSTOR support; also rejects
 *  LOCK and REPZ/REPNZ prefixes. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
933
934
/** Opcode 0x0f 0x01 0xd1 (XSETBV).
 *  Raises \#UD unless the guest CPU reports XSAVE/XRSTOR support; also rejects
 *  LOCK and REPZ/REPNZ prefixes. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
946
947
/** Opcode 0x0f 0x01 /3 (memory form).
 *  LIDT - load the IDTR from memory.  In 64-bit mode the operand size is
 *  forced to 64-bit; otherwise the effective operand size is used. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
966
967
/* AMD SVM instructions (group 7, modrm bytes 0xd8..0xdf).
   All currently unimplemented: the FNIEMOP_UD_STUB macro generates a handler
   that raises #UD. */

/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
992/** Opcode 0x0f 0x01 /4. */
993FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
994{
995 IEMOP_MNEMONIC(smsw, "smsw");
996 IEMOP_HLP_MIN_286();
997 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
998 {
999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1000 switch (pVCpu->iem.s.enmEffOpSize)
1001 {
1002 case IEMMODE_16BIT:
1003 IEM_MC_BEGIN(0, 1);
1004 IEM_MC_LOCAL(uint16_t, u16Tmp);
1005 IEM_MC_FETCH_CR0_U16(u16Tmp);
1006 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
1007 { /* likely */ }
1008 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
1009 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1010 else
1011 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1012 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
1013 IEM_MC_ADVANCE_RIP();
1014 IEM_MC_END();
1015 return VINF_SUCCESS;
1016
1017 case IEMMODE_32BIT:
1018 IEM_MC_BEGIN(0, 1);
1019 IEM_MC_LOCAL(uint32_t, u32Tmp);
1020 IEM_MC_FETCH_CR0_U32(u32Tmp);
1021 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
1022 IEM_MC_ADVANCE_RIP();
1023 IEM_MC_END();
1024 return VINF_SUCCESS;
1025
1026 case IEMMODE_64BIT:
1027 IEM_MC_BEGIN(0, 1);
1028 IEM_MC_LOCAL(uint64_t, u64Tmp);
1029 IEM_MC_FETCH_CR0_U64(u64Tmp);
1030 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
1031 IEM_MC_ADVANCE_RIP();
1032 IEM_MC_END();
1033 return VINF_SUCCESS;
1034
1035 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1036 }
1037 }
1038 else
1039 {
1040 /* Ignore operand size here, memory refs are always 16-bit. */
1041 IEM_MC_BEGIN(0, 2);
1042 IEM_MC_LOCAL(uint16_t, u16Tmp);
1043 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1044 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1046 IEM_MC_FETCH_CR0_U16(u16Tmp);
1047 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
1048 { /* likely */ }
1049 else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
1050 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1051 else
1052 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1053 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
1054 IEM_MC_ADVANCE_RIP();
1055 IEM_MC_END();
1056 return VINF_SUCCESS;
1057 }
1058}
1059
1060
/** Opcode 0x0f 0x01 /6.
 *
 * LMSW - load the machine status word from a 16-bit register or memory
 * operand.  The actual CR0 update (and privilege/#GP checks, presumably) is
 * deferred to iemCImpl_lmsw.
 */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        /* Memory source.  Note: effective address calculation consumes the
           remaining ModR/M bytes, so it must precede the done-decoding check. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1090
1091
/** Opcode 0x0f 0x01 /7.
 *
 * INVLPG - invalidate the TLB entry for the page containing the memory
 * operand's effective address; the work is deferred to iemCImpl_invlpg.
 *
 * NOTE(review): unlike the sibling memory forms (lmsw, smsw), the
 * done-decoding check here precedes IEM_MC_CALC_RM_EFF_ADDR - confirm this
 * ordering is intentional for this encoding.
 */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1105
1106
/** Opcode 0x0f 0x01 /7 (mod=3, rm=0).
 *
 * SWAPGS - 64-bit mode only; everything is deferred to iemCImpl_swapgs.
 */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
1115
1116
/** Opcode 0x0f 0x01 /7 (mod=3, rm=1).
 *
 * RDTSCP - not implemented yet; logs a stub complaint and fails the
 * instruction so the caller can fall back to another execution engine.
 */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pVCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1124
1125
/** Opcode 0x0f 0x01.
 *
 * Group 7 dispatcher.  The /reg field of the ModR/M byte selects the
 * instruction; for several /reg values a mod=3 encoding selects special
 * register-form instructions (VMX, monitor/mwait, xgetbv/xsetbv, SVM,
 * swapgs/rdtscp) keyed off the rm field.
 */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: /* sgdt (mem) / VMX instructions (reg forms). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1: /* sidt (mem) / monitor, mwait (reg forms). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2: /* lgdt (mem) / xgetbv, xsetbv (reg forms). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3: /* lidt (mem) / AMD-V instructions (reg forms, all 8 defined). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4: /* smsw - both register and memory forms handled inside. */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5: /* Undefined. */
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6: /* lmsw - both register and memory forms handled inside. */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* invlpg (mem) / swapgs, rdtscp (reg forms). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1202
/** Opcode 0x0f 0x00 /3.
 *
 * Common worker for LAR and LSL (Gv,Ew): fetches a 16-bit selector from a
 * register or memory and defers the access-rights/limit lookup to
 * iemCImpl_LarLsl_u16 / _u64.  @a fIsLar selects LAR (true) vs LSL (false).
 * Note that the 32-bit and 64-bit operand sizes share the 64-bit CIMPL path.
 * Not valid in real or V86 mode.
 */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. */
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory source - the selector word is read from the effective address. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1296
1297
1298
/** Opcode 0x0f 0x02.
 * LAR Gv,Ew - thin wrapper selecting the LAR variant of the common worker. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}
1305
1306
/** Opcode 0x0f 0x03.
 * LSL Gv,Ew - thin wrapper selecting the LSL variant of the common worker. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}
1313
1314
/** Opcode 0x0f 0x05.
 * SYSCALL - deferred entirely to iemCImpl_syscall. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}
1322
1323
/** Opcode 0x0f 0x06.
 * CLTS - deferred entirely to iemCImpl_clts. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1331
1332
/** Opcode 0x0f 0x07.
 * SYSRET - deferred entirely to iemCImpl_sysret. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1340
1341
/** Opcode 0x0f 0x08.
 * INVD - stubbed; the commented-out line below records the min-CPU check
 * that belongs in the eventual implementation. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();
1345
1346
/** Opcode 0x0f 0x09.
 * WBINVD - performs only the CPL-0 check and then advances RIP; the actual
 * cache write-back/invalidate is intentionally not emulated. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}
1359
1360
/** Opcode 0x0f 0x0b.
 * UD2 - architecturally defined invalid opcode; always raises #UD. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}
1367
/** Opcode 0x0f 0x0d.
 *
 * AMD prefetch group (Grp P).  Raises #UD when the guest CPU lacks the
 * 3DNow!-prefetch feature or when a register operand is encoded; otherwise
 * decodes the effective address and treats the prefetch as a NOP.
 */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms are invalid for this group. */
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1408
1409
/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/*
 * 3DNow! operations.  These are selected by a trailing function byte fetched
 * by the iemOp_3Dnow dispatcher below; all are currently stubs.
 */

/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1485
1486
/** Opcode 0x0f 0x0f.
 *
 * 3DNow! dispatcher: raises #UD if the guest CPU lacks 3DNow!, otherwise
 * fetches the trailing function byte and dispatches to the matching stub.
 */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1528
1529
/** Opcode 0x0f 0x10.
 * movups/movupd/movss/movsd load forms - not implemented yet. */
FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);
1532
1533
/** Opcode 0x0f 0x11.
 *
 * Store forms of movups (no prefix) and movsd (F2 prefix); the movupd (66)
 * and movss (F3) variants are still stubbed and fail with
 * VERR_IEM_INSTR_NOT_IMPLEMENTED.
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd)
{
    /* Quick hack. Need to restructure all of this later some time. */
    uint32_t const fRelevantPrefix = pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ);
    if (fRelevantPrefix == 0)
    {
        /* No 66/F2/F3 prefix: movups Wps,Vps. */
        IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /*
             * Register, register.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_BEGIN(0, 0);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
            IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                                  ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /*
             * Memory, register.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else if (fRelevantPrefix == IEM_OP_PRF_REPNZ)
    {
        /* F2 prefix: movsd Wsd,Vsd (low qword only). */
        IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /*
             * Register, register.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, uSrc);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
            IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /*
             * Memory, register.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /* 66 (movupd) and F3 (movss) variants not implemented yet. */
        IEMOP_BITCH_ABOUT_STUB();
        return VERR_IEM_INSTR_NOT_IMPLEMENTED;
    }
    return VINF_SUCCESS;
}
1627
1628
/** Opcode 0x0f 0x12.
 * movlps/movhlps/movlpd/movsldup/movddup load forms - not implemented yet. */
FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq); //NEXT
1631
1632
/** Opcode 0x0f 0x13.
 *
 * Store form of movlpd (66 prefix) only; all other prefix combinations are
 * still stubbed.  Note the exact-equality prefix test: any extra prefix
 * (e.g. a segment override) routes to the stub - part of the quick hack.
 */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq)
{
    /* Quick hack. Need to restructure all of this later some time. */
    if (pVCpu->iem.s.fPrefixes == IEM_OP_PRF_SIZE_OP)
    {
        IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
#if 0
            /*
             * Register, register.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
            IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
#else
            /* The register,register encoding is invalid for movlpd. */
            return IEMOP_RAISE_INVALID_OPCODE();
#endif
        }
        else
        {
            /*
             * Memory, register.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        return VINF_SUCCESS;
    }

    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1686
1687
/** Opcode 0x0f 0x14.
 * NOTE(review): 0x0f 0x14 is UNPCKLPS/UNPCKLPD per the SDM; the identifier
 * spelling ("unpckhlps") looks off but is referenced by the opcode table. */
FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
/** Opcode 0x0f 0x15. */
FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
/** Opcode 0x0f 0x16. */
FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x17. */
FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq); //NEXT
1696
1697
/** Opcode 0x0f 0x18.
 *
 * Group 16 prefetch hints (prefetchNTA/T0/T1/T2).  Only the memory forms are
 * valid; the effective address is decoded but the prefetch itself is a NOP.
 */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0  m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1  m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2  m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}
1730
1731
1732/** Opcode 0x0f 0x19..0x1f. */
1733FNIEMOP_DEF(iemOp_nop_Ev)
1734{
1735 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
1736 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1737 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1738 {
1739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1740 IEM_MC_BEGIN(0, 0);
1741 IEM_MC_ADVANCE_RIP();
1742 IEM_MC_END();
1743 }
1744 else
1745 {
1746 IEM_MC_BEGIN(0, 1);
1747 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1750 /* Currently a NOP. */
1751 NOREF(GCPtrEffSrc);
1752 IEM_MC_ADVANCE_RIP();
1753 IEM_MC_END();
1754 }
1755 return VINF_SUCCESS;
1756}
1757
1758
/** Opcode 0x0f 0x20.
 *
 * MOV Rd,Cd - read a control register into a general register.  Only CR0,
 * CR2, CR3, CR4 and CR8 are valid; a LOCK prefix encodes CR8 on CPUs with
 * the fMovCr8In32Bit feature (AMD alternative encoding).  The actual move is
 * deferred to iemCImpl_mov_Rd_Cd.
 */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}
1790
1791
/** Opcode 0x0f 0x21.
 *
 * MOV Rd,Dd - read a debug register into a general register.  REX.R is
 * invalid here (there are no DR8..DR15); the move is deferred to
 * iemCImpl_mov_Rd_Dd.
 */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}
1805
1806
/** Opcode 0x0f 0x22.
 *
 * MOV Cd,Rd - write a general register into a control register.  Mirrors
 * iemOp_mov_Rd_Cd: only CR0/2/3/4/8 are valid, LOCK encodes CR8 where the
 * CPU supports it, and the work is deferred to iemCImpl_mov_Cd_Rd.
 */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}
1838
1839
/** Opcode 0x0f 0x23.
 *
 * MOV Dd,Rd - write a general register into a debug register.  REX.R is
 * invalid (no DR8..DR15); deferred to iemCImpl_mov_Dd_Rd.
 */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}
1853
1854
/** Opcode 0x0f 0x24.
 * MOV Rd,Td - test registers; treated as invalid opcode on the emulated
 * CPUs (only existed on 386/486). */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1863
1864
/** Opcode 0x0f 0x26.
 * MOV Td,Rd - test registers; treated as invalid opcode on the emulated
 * CPUs (only existed on 386/486). */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1873
1874
/** Opcode 0x0f 0x28.
 *
 * movaps Vps,Wps (no prefix) / movapd Vpd,Wpd (66 prefix): aligned 128-bit
 * load into an XMM register.  The memory form uses the SSE-aligned fetch,
 * which presumably raises #GP on a misaligned address - see the macro.
 */
FNIEMOP_DEF(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd)
{
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
        IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
    else
        IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1925
1926
/** Opcode 0x0f 0x29.
 *
 * movaps Wps,Vps (no prefix) / movapd Wpd,Vpd (66 prefix): aligned 128-bit
 * store from an XMM register (the mirror of opcode 0x28).
 */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
{
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
        IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
    else
        IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1977
1978
/** Opcode 0x0f 0x2a.
 * cvtpi2ps/cvtpi2pd/cvtsi2ss/cvtsi2sd - not implemented yet. */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey); //NEXT
1981
1982
/** Opcode 0x0f 0x2b.
 *
 * movntps Mps,Vps (no prefix) / movntpd Mpd,Vpd (66 prefix): non-temporal
 * aligned 128-bit store.  Only the memory form is valid; the non-temporal
 * hint itself is not modelled - it is emulated as a plain aligned store.
 */
FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
{
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
        IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
    else
        IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mdq,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
2019
2020
/* Opcodes 0x0f 0x2c thru 0x2f - fp-to-int conversions and ordered/unordered compares; not implemented yet. */
/** Opcode 0x0f 0x2c. */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd); //NEXT
/** Opcode 0x0f 0x2d. */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
/** Opcode 0x0f 0x2e. */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x2f. */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
2029
2030
/** Opcode 0x0f 0x30.
 * Decoding only; privilege checks and the actual MSR write are deferred to
 * the C implementation (iemCImpl_wrmsr). */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}
2038
2039
/** Opcode 0x0f 0x31.
 * Decoding only; TSC read and CR4.TSD/CPL checks are deferred to the C
 * implementation (iemCImpl_rdtsc). */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
2047
2048
/** Opcode 0x0f 0x32. (The old comment said 0x33, but RDMSR is 0F 32; 0F 33 is RDPMC.)
 * Decoding only; privilege checks and the actual MSR read are deferred to the
 * C implementation (iemCImpl_rdmsr). */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
2056
2057
/** Opcode 0x0f 0x33. (Was mislabelled 0x34; RDPMC is 0F 33.) */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
/** Opcode 0x0f 0x3a. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
2070
2071
2072/**
2073 * Implements a conditional move.
2074 *
2075 * Wish there was an obvious way to do this where we could share and reduce
2076 * code bloat.
2077 *
2078 * @param a_Cnd The conditional "microcode" operation.
2079 */
2080#define CMOV_X(a_Cnd) \
2081 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2082 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2083 { \
2084 switch (pVCpu->iem.s.enmEffOpSize) \
2085 { \
2086 case IEMMODE_16BIT: \
2087 IEM_MC_BEGIN(0, 1); \
2088 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2089 a_Cnd { \
2090 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2091 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2092 } IEM_MC_ENDIF(); \
2093 IEM_MC_ADVANCE_RIP(); \
2094 IEM_MC_END(); \
2095 return VINF_SUCCESS; \
2096 \
2097 case IEMMODE_32BIT: \
2098 IEM_MC_BEGIN(0, 1); \
2099 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2100 a_Cnd { \
2101 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2102 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2103 } IEM_MC_ELSE() { \
2104 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2105 } IEM_MC_ENDIF(); \
2106 IEM_MC_ADVANCE_RIP(); \
2107 IEM_MC_END(); \
2108 return VINF_SUCCESS; \
2109 \
2110 case IEMMODE_64BIT: \
2111 IEM_MC_BEGIN(0, 1); \
2112 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2113 a_Cnd { \
2114 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2115 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2116 } IEM_MC_ENDIF(); \
2117 IEM_MC_ADVANCE_RIP(); \
2118 IEM_MC_END(); \
2119 return VINF_SUCCESS; \
2120 \
2121 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2122 } \
2123 } \
2124 else \
2125 { \
2126 switch (pVCpu->iem.s.enmEffOpSize) \
2127 { \
2128 case IEMMODE_16BIT: \
2129 IEM_MC_BEGIN(0, 2); \
2130 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2131 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2133 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2134 a_Cnd { \
2135 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2136 } IEM_MC_ENDIF(); \
2137 IEM_MC_ADVANCE_RIP(); \
2138 IEM_MC_END(); \
2139 return VINF_SUCCESS; \
2140 \
2141 case IEMMODE_32BIT: \
2142 IEM_MC_BEGIN(0, 2); \
2143 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2144 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2145 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2146 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2147 a_Cnd { \
2148 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2149 } IEM_MC_ELSE() { \
2150 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2151 } IEM_MC_ENDIF(); \
2152 IEM_MC_ADVANCE_RIP(); \
2153 IEM_MC_END(); \
2154 return VINF_SUCCESS; \
2155 \
2156 case IEMMODE_64BIT: \
2157 IEM_MC_BEGIN(0, 2); \
2158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2159 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2161 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2162 a_Cnd { \
2163 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2164 } IEM_MC_ENDIF(); \
2165 IEM_MC_ADVANCE_RIP(); \
2166 IEM_MC_END(); \
2167 return VINF_SUCCESS; \
2168 \
2169 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2170 } \
2171 } do {} while (0)
2172
2173
2174
/** Opcode 0x0f 0x40 - cmovo Gv,Ev: move if OF=1. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}
2181
2182
/** Opcode 0x0f 0x41 - cmovno Gv,Ev: move if OF=0. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}
2189
2190
/** Opcode 0x0f 0x42 - cmovc/cmovb Gv,Ev: move if CF=1. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}
2197
2198
/** Opcode 0x0f 0x43 - cmovnc/cmovae Gv,Ev: move if CF=0. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}
2205
2206
/** Opcode 0x0f 0x44 - cmove/cmovz Gv,Ev: move if ZF=1. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}
2213
2214
/** Opcode 0x0f 0x45 - cmovne/cmovnz Gv,Ev: move if ZF=0. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}
2221
2222
/** Opcode 0x0f 0x46 - cmovbe/cmovna Gv,Ev: move if CF=1 or ZF=1. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}
2229
2230
/** Opcode 0x0f 0x47 - cmovnbe/cmova Gv,Ev: move if CF=0 and ZF=0. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}
2237
2238
/** Opcode 0x0f 0x48 - cmovs Gv,Ev: move if SF=1. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}
2245
2246
/** Opcode 0x0f 0x49 - cmovns Gv,Ev: move if SF=0. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}
2253
2254
/** Opcode 0x0f 0x4a - cmovp/cmovpe Gv,Ev: move if PF=1. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}
2261
2262
/** Opcode 0x0f 0x4b - cmovnp/cmovpo Gv,Ev: move if PF=0. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}
2269
2270
/** Opcode 0x0f 0x4c - cmovl/cmovnge Gv,Ev: move if SF != OF. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}
2277
2278
/** Opcode 0x0f 0x4d - cmovnl/cmovge Gv,Ev: move if SF == OF. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}
2285
2286
/** Opcode 0x0f 0x4e - cmovle/cmovng Gv,Ev: move if ZF=1 or SF != OF. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}
2293
2294
/** Opcode 0x0f 0x4f - cmovnle/cmovg Gv,Ev: move if ZF=0 and SF == OF. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}
2301
2302#undef CMOV_X
2303
/* Opcodes 0x0f 0x50 thru 0x5f - SSE/SSE2 packed/scalar arithmetic, logic and
   conversion instructions; none of these are implemented yet. */
/** Opcode 0x0f 0x50. */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
/** Opcode 0x0f 0x51. */
FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
/** Opcode 0x0f 0x52. */
FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
/** Opcode 0x0f 0x53. */
FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
/** Opcode 0x0f 0x54. */
FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
/** Opcode 0x0f 0x55. */
FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
/** Opcode 0x0f 0x56. */
FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
/** Opcode 0x0f 0x57. */
FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
/** Opcode 0x0f 0x58. */
FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x59. */
FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);//NEXT
/** Opcode 0x0f 0x5a. */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
/** Opcode 0x0f 0x5b. */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
/** Opcode 0x0f 0x5c. */
FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
/** Opcode 0x0f 0x5d. */
FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
/** Opcode 0x0f 0x5e. */
FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
/** Opcode 0x0f 0x5f. */
FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
2336
2337
2338/**
2339 * Common worker for SSE2 and MMX instructions on the forms:
2340 * pxxxx xmm1, xmm2/mem128
2341 * pxxxx mm1, mm2/mem32
2342 *
2343 * The 2nd operand is the first half of a register, which in the memory case
2344 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
2345 * memory accessed for MMX.
2346 *
2347 * Exceptions type 4.
2348 */
2349FNIEMOP_DEF_1(iemOpCommonMmxSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2350{
2351 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2352 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2353 {
2354 case IEM_OP_PRF_SIZE_OP: /* SSE */
2355 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2356 {
2357 /*
2358 * Register, register.
2359 */
2360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2361 IEM_MC_BEGIN(2, 0);
2362 IEM_MC_ARG(uint128_t *, pDst, 0);
2363 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2364 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2365 IEM_MC_PREPARE_SSE_USAGE();
2366 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2367 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2368 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2369 IEM_MC_ADVANCE_RIP();
2370 IEM_MC_END();
2371 }
2372 else
2373 {
2374 /*
2375 * Register, memory.
2376 */
2377 IEM_MC_BEGIN(2, 2);
2378 IEM_MC_ARG(uint128_t *, pDst, 0);
2379 IEM_MC_LOCAL(uint64_t, uSrc);
2380 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2381 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2382
2383 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2385 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2386 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2387
2388 IEM_MC_PREPARE_SSE_USAGE();
2389 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2390 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2391
2392 IEM_MC_ADVANCE_RIP();
2393 IEM_MC_END();
2394 }
2395 return VINF_SUCCESS;
2396
2397 case 0: /* MMX */
2398 if (!pImpl->pfnU64)
2399 return IEMOP_RAISE_INVALID_OPCODE();
2400 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2401 {
2402 /*
2403 * Register, register.
2404 */
2405 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2406 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2408 IEM_MC_BEGIN(2, 0);
2409 IEM_MC_ARG(uint64_t *, pDst, 0);
2410 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2411 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2412 IEM_MC_PREPARE_FPU_USAGE();
2413 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2414 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2415 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2416 IEM_MC_ADVANCE_RIP();
2417 IEM_MC_END();
2418 }
2419 else
2420 {
2421 /*
2422 * Register, memory.
2423 */
2424 IEM_MC_BEGIN(2, 2);
2425 IEM_MC_ARG(uint64_t *, pDst, 0);
2426 IEM_MC_LOCAL(uint32_t, uSrc);
2427 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2428 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2429
2430 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2432 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2433 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2434
2435 IEM_MC_PREPARE_FPU_USAGE();
2436 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2437 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2438
2439 IEM_MC_ADVANCE_RIP();
2440 IEM_MC_END();
2441 }
2442 return VINF_SUCCESS;
2443
2444 default:
2445 return IEMOP_RAISE_INVALID_OPCODE();
2446 }
2447}
2448
2449
/** Opcode 0x0f 0x60 - punpcklbw; decoding shared via the low-low worker. */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC(punpcklbw, "punpcklbw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}
2456
2457
/** Opcode 0x0f 0x61 - punpcklwd; decoding shared via the low-low worker. */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC(punpcklwd, "punpcklwd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}
2464
2465
/** Opcode 0x0f 0x62 - punpckldq; decoding shared via the low-low worker. */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq)
{
    IEMOP_MNEMONIC(punpckldq, "punpckldq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}
2472
2473
/* Opcodes 0x0f 0x63 thru 0x67 - pack and packed-compare instructions; not implemented yet. */
/** Opcode 0x0f 0x63. */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
/** Opcode 0x0f 0x64. */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
/** Opcode 0x0f 0x65. */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
/** Opcode 0x0f 0x66. */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
/** Opcode 0x0f 0x67. */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
2484
2485
2486/**
2487 * Common worker for SSE2 and MMX instructions on the forms:
2488 * pxxxx xmm1, xmm2/mem128
2489 * pxxxx mm1, mm2/mem64
2490 *
2491 * The 2nd operand is the second half of a register, which in the memory case
2492 * means a 64-bit memory access for MMX, and for MMX a 128-bit aligned access
2493 * where it may read the full 128 bits or only the upper 64 bits.
2494 *
2495 * Exceptions type 4.
2496 */
2497FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2498{
2499 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2500 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2501 {
2502 case IEM_OP_PRF_SIZE_OP: /* SSE */
2503 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2504 {
2505 /*
2506 * Register, register.
2507 */
2508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2509 IEM_MC_BEGIN(2, 0);
2510 IEM_MC_ARG(uint128_t *, pDst, 0);
2511 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2512 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2513 IEM_MC_PREPARE_SSE_USAGE();
2514 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2515 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2516 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2517 IEM_MC_ADVANCE_RIP();
2518 IEM_MC_END();
2519 }
2520 else
2521 {
2522 /*
2523 * Register, memory.
2524 */
2525 IEM_MC_BEGIN(2, 2);
2526 IEM_MC_ARG(uint128_t *, pDst, 0);
2527 IEM_MC_LOCAL(uint128_t, uSrc);
2528 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2529 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2530
2531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2533 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2534 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only right high qword */
2535
2536 IEM_MC_PREPARE_SSE_USAGE();
2537 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2538 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2539
2540 IEM_MC_ADVANCE_RIP();
2541 IEM_MC_END();
2542 }
2543 return VINF_SUCCESS;
2544
2545 case 0: /* MMX */
2546 if (!pImpl->pfnU64)
2547 return IEMOP_RAISE_INVALID_OPCODE();
2548 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2549 {
2550 /*
2551 * Register, register.
2552 */
2553 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2554 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2556 IEM_MC_BEGIN(2, 0);
2557 IEM_MC_ARG(uint64_t *, pDst, 0);
2558 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2559 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2560 IEM_MC_PREPARE_FPU_USAGE();
2561 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2562 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2563 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2564 IEM_MC_ADVANCE_RIP();
2565 IEM_MC_END();
2566 }
2567 else
2568 {
2569 /*
2570 * Register, memory.
2571 */
2572 IEM_MC_BEGIN(2, 2);
2573 IEM_MC_ARG(uint64_t *, pDst, 0);
2574 IEM_MC_LOCAL(uint64_t, uSrc);
2575 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2576 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2577
2578 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2580 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2581 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2582
2583 IEM_MC_PREPARE_FPU_USAGE();
2584 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2585 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2586
2587 IEM_MC_ADVANCE_RIP();
2588 IEM_MC_END();
2589 }
2590 return VINF_SUCCESS;
2591
2592 default:
2593 return IEMOP_RAISE_INVALID_OPCODE();
2594 }
2595}
2596
2597
/** Opcode 0x0f 0x68 - punpckhbw; decoding shared via the high-high worker. */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC(punpckhbw, "punpckhbw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}
2604
2605
/** Opcode 0x0f 0x69 - punpckhwd; decoding shared via the high-high worker. */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC(punpckhwd, "punpckhwd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
2612
2613
/** Opcode 0x0f 0x6a - punpckhdq; decoding shared via the high-high worker. */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC(punpckhdq, "punpckhdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
2620
/** Opcode 0x0f 0x6b - packssdw; not implemented yet. */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);
2623
2624
/** Opcode 0x0f 0x6c - punpcklqdq (SSE2 only; the shared worker rejects the
 *  no-prefix MMX form because g_iemAImpl_punpcklqdq has no pfnU64). */
FNIEMOP_DEF(iemOp_punpcklqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}
2631
2632
/** Opcode 0x0f 0x6d - punpckhqdq (SSE2 only; the shared worker rejects the
 *  no-prefix MMX form because g_iemAImpl_punpckhqdq has no pfnU64). */
FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC(punpckhqdq, "punpckhqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}
2639
2640
/** Opcode 0x0f 0x6e.
 * movd/movq from a general register or memory into an MMX (no prefix) or XMM
 * (0x66 prefix) register; REX.W selects the 64-bit (movq) form.  The XMM
 * destination is zero-extended to the full 128 bits. */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            /* NOTE(review): the mnemonic strings use Wq/Wd for the destination;
               operand-map-wise this is Vq/Vd - confirm intended naming. */
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                IEMOP_MNEMONIC(movdq_Wq_Eq, "movq Wq,Eq");
            else
                IEMOP_MNEMONIC(movdq_Wd_Ed, "movd Wd,Ed");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* XMM, greg*/
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* XMM, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
            else
                IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* MMX, greg */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                else
                    IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* MMX, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2751
2752
/** Opcode 0x0f 0x6f.
 * movq Pq,Qq (no prefix, MMX) / movdqa Vdq,Wdq (0x66, aligned) /
 * movdqu Vdq,Wdq (F3, unaligned) - register or memory to register load. */
FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
{
    bool fAligned = false;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - aligned and unaligned forms share the decoding below. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
            else
                IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 0);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                                      (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                /* movdqa checks 16-byte alignment (and may raise #GP); movdqu does not. */
                if (fAligned)
                    IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                else
                    IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2849
2850
/** Opcode 0x0f 0x70. The immediate here is evil!
 * pshufw (MMX ext, no prefix) / pshufd (0x66) / pshuflw (F2) / pshufhw (F3).
 * "Evil" because the Ib immediate follows the ModRM displacement, so in the
 * memory forms it can only be fetched after the effective address decoding. */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
        case IEM_OP_PRF_REPNZ: /* SSE */
        case IEM_OP_PRF_REPZ: /* SSE */
        {
            /* All three SSE variants share the decoding; pick the worker. */
            PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
            {
                case IEM_OP_PRF_SIZE_OP:
                    IEMOP_MNEMONIC(pshufd_Vdq_Wdq, "pshufd Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufd;
                    break;
                case IEM_OP_PRF_REPNZ:
                    IEMOP_MNEMONIC(pshuflw_Vdq_Wdq, "pshuflw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshuflw;
                    break;
                case IEM_OP_PRF_REPZ:
                    IEMOP_MNEMONIC(pshufhw_Vdq_Wdq, "pshufhw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufhw;
                    break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                /* The immediate comes after the displacement, hence fetched here. */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case 0: /* MMX Extension */
            IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                /* The immediate comes after the displacement, hence fetched here. */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

                IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2979
2980
2981/** Opcode 0x0f 0x71 11/2. psrlw Nq,Ib (MMX) - stub, not implemented yet. */
2982FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
2983
2984/** Opcode 0x66 0x0f 0x71 11/2. psrlw Udq,Ib (SSE2) - stub, not implemented yet. */
2985FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);
2986
2987/** Opcode 0x0f 0x71 11/4. psraw Nq,Ib (MMX) - stub, not implemented yet. */
2988FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
2989
2990/** Opcode 0x66 0x0f 0x71 11/4. psraw Udq,Ib (SSE2) - stub, not implemented yet. */
2991FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);
2992
2993/** Opcode 0x0f 0x71 11/6. psllw Nq,Ib (MMX) - stub, not implemented yet. */
2994FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
2995
2996/** Opcode 0x66 0x0f 0x71 11/6. psllw Udq,Ib (SSE2) - stub, not implemented yet. */
2997FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
2998
2999
3000/** Opcode 0x0f 0x71 (Group 12): packed word shifts by immediate (psrlw/psraw/psllw).
3000 * Register forms only - a memory operand (mod != 3) raises \#UD. The 0x66 prefix
3000 * selects the SSE2 (XMM) form, no mandatory prefix selects the MMX form. */
3001FNIEMOP_DEF(iemOp_Grp12)
3002{
3003 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3004 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3005 return IEMOP_RAISE_INVALID_OPCODE();
3006 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3007 {
3008 case 0: case 1: case 3: case 5: case 7: /* unassigned /r values */
3009 return IEMOP_RAISE_INVALID_OPCODE();
3010 case 2: /* psrlw */
3011 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3012 {
3013 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
3014 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
3015 default: return IEMOP_RAISE_INVALID_OPCODE();
3016 }
3017 case 4: /* psraw */
3018 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3019 {
3020 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
3021 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
3022 default: return IEMOP_RAISE_INVALID_OPCODE();
3023 }
3024 case 6: /* psllw */
3025 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3026 {
3027 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
3028 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
3029 default: return IEMOP_RAISE_INVALID_OPCODE();
3030 }
3031 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* all cases above return */
3032 }
3033}
3034
3035
3036/** Opcode 0x0f 0x72 11/2. psrld Nq,Ib (MMX) - stub, not implemented yet. */
3037FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3038
3039/** Opcode 0x66 0x0f 0x72 11/2. psrld Udq,Ib (SSE2) - stub, not implemented yet. */
3040FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);
3041
3042/** Opcode 0x0f 0x72 11/4. psrad Nq,Ib (MMX) - stub, not implemented yet. */
3043FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3044
3045/** Opcode 0x66 0x0f 0x72 11/4. psrad Udq,Ib (SSE2) - stub, not implemented yet. */
3046FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);
3047
3048/** Opcode 0x0f 0x72 11/6. pslld Nq,Ib (MMX) - stub, not implemented yet. */
3049FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3050
3051/** Opcode 0x66 0x0f 0x72 11/6. pslld Udq,Ib (SSE2) - stub, not implemented yet. */
3052FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
3053
3054
3055/** Opcode 0x0f 0x72 (Group 13): packed doubleword shifts by immediate (psrld/psrad/pslld).
3055 * Register forms only - a memory operand (mod != 3) raises \#UD. The 0x66 prefix
3055 * selects the SSE2 (XMM) form, no mandatory prefix selects the MMX form. */
3056FNIEMOP_DEF(iemOp_Grp13)
3057{
3058 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3059 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3060 return IEMOP_RAISE_INVALID_OPCODE();
3061 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3062 {
3063 case 0: case 1: case 3: case 5: case 7: /* unassigned /r values */
3064 return IEMOP_RAISE_INVALID_OPCODE();
3065 case 2: /* psrld */
3066 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3067 {
3068 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
3069 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
3070 default: return IEMOP_RAISE_INVALID_OPCODE();
3071 }
3072 case 4: /* psrad */
3073 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3074 {
3075 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
3076 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
3077 default: return IEMOP_RAISE_INVALID_OPCODE();
3078 }
3079 case 6: /* pslld */
3080 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3081 {
3082 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
3083 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
3084 default: return IEMOP_RAISE_INVALID_OPCODE();
3085 }
3086 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* all cases above return */
3087 }
3088}
3089
3090
3092/** Opcode 0x0f 0x73 11/2. psrlq Nq,Ib (MMX) - stub, not implemented yet. */
3093FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3094
3095/** Opcode 0x66 0x0f 0x73 11/2. psrlq Udq,Ib (SSE2) - stub, not implemented yet. */
3096FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);
3097
3098/** Opcode 0x66 0x0f 0x73 11/3. psrldq Udq,Ib (SSE2 only, byte shift) - stub. */
3099FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT
3100
3101/** Opcode 0x0f 0x73 11/6. psllq Nq,Ib (MMX) - stub, not implemented yet. */
3102FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3103
3104/** Opcode 0x66 0x0f 0x73 11/6. psllq Udq,Ib (SSE2) - stub, not implemented yet. */
3105FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);
3106
3107/** Opcode 0x66 0x0f 0x73 11/7. pslldq Udq,Ib (SSE2 only, byte shift) - stub. */
3108FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
3108
3109
3110/** Opcode 0x0f 0x73 (Group 14): packed quadword shifts by immediate (psrlq/psllq)
3110 * plus the SSE2-only whole-register byte shifts psrldq (/3) and pslldq (/7),
3110 * which exist only with the 0x66 prefix. Register forms only - a memory
3110 * operand (mod != 3) raises \#UD. */
3111FNIEMOP_DEF(iemOp_Grp14)
3112{
3113 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3114 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3115 return IEMOP_RAISE_INVALID_OPCODE();
3116 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3117 {
3118 case 0: case 1: case 4: case 5: /* unassigned /r values */
3119 return IEMOP_RAISE_INVALID_OPCODE();
3120 case 2: /* psrlq */
3121 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3122 {
3123 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
3124 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
3125 default: return IEMOP_RAISE_INVALID_OPCODE();
3126 }
3127 case 3: /* psrldq - 0x66 prefixed form only */
3128 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3129 {
3130 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
3131 default: return IEMOP_RAISE_INVALID_OPCODE();
3132 }
3133 case 6: /* psllq */
3134 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3135 {
3136 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
3137 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
3138 default: return IEMOP_RAISE_INVALID_OPCODE();
3139 }
3140 case 7: /* pslldq - 0x66 prefixed form only */
3141 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3142 {
3143 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
3144 default: return IEMOP_RAISE_INVALID_OPCODE();
3145 }
3146 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* all cases above return */
3147 }
3148}
3149
3150
3151/**
3152 * Common worker for SSE2 and MMX instructions on the forms:
3153 * pxxx mm1, mm2/mem64
3154 * pxxx xmm1, xmm2/mem128
3155 *
3156 * Proper alignment of the 128-bit operand is enforced.
3157 * Exceptions type 4. SSE2 and MMX cpuid checks.
 *
 * @param   pImpl   Media implementation table; pfnU128 is invoked for the
 *                  0x66-prefixed SSE2 form, pfnU64 for the unprefixed MMX form.
 *                  F3/F2 prefixed forms decode as invalid opcode here.
3158 */
3159FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3160{
3161 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3162 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3163 {
3164 case IEM_OP_PRF_SIZE_OP: /* SSE */
3165 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3166 {
3167 /*
3168 * Register, register.
3169 */
3170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3171 IEM_MC_BEGIN(2, 0);
3172 IEM_MC_ARG(uint128_t *, pDst, 0);
3173 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3174 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3175 IEM_MC_PREPARE_SSE_USAGE();
3176 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3177 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3178 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3179 IEM_MC_ADVANCE_RIP();
3180 IEM_MC_END();
3181 }
3182 else
3183 {
3184 /*
3185 * Register, memory. The 128-bit source must be 16-byte aligned.
3186 */
3187 IEM_MC_BEGIN(2, 2);
3188 IEM_MC_ARG(uint128_t *, pDst, 0);
3189 IEM_MC_LOCAL(uint128_t, uSrc);
3190 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3192
3193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3195 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3196 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3197
3198 IEM_MC_PREPARE_SSE_USAGE();
3199 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3200 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3201
3202 IEM_MC_ADVANCE_RIP();
3203 IEM_MC_END();
3204 }
3205 return VINF_SUCCESS;
3206
3207 case 0: /* MMX */
3208 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3209 {
3210 /*
3211 * Register, register.
3212 */
3213 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3214 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3216 IEM_MC_BEGIN(2, 0);
3217 IEM_MC_ARG(uint64_t *, pDst, 0);
3218 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3219 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3220 IEM_MC_PREPARE_FPU_USAGE();
3221 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3222 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3223 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3224 IEM_MC_ADVANCE_RIP();
3225 IEM_MC_END();
3226 }
3227 else
3228 {
3229 /*
3230 * Register, memory. 64-bit MMX source, no alignment restriction.
3231 */
3232 IEM_MC_BEGIN(2, 2);
3233 IEM_MC_ARG(uint64_t *, pDst, 0);
3234 IEM_MC_LOCAL(uint64_t, uSrc);
3235 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3236 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3237
3238 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3240 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3241 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3242
3243 IEM_MC_PREPARE_FPU_USAGE();
3244 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3245 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3246
3247 IEM_MC_ADVANCE_RIP();
3248 IEM_MC_END();
3249 }
3250 return VINF_SUCCESS;
3251
3252 default: /* F3/F2 prefixed forms: invalid */
3253 return IEMOP_RAISE_INVALID_OPCODE();
3254 }
3255}
3256
3257
3258/** Opcode 0x0f 0x74. pcmpeqb Pq,Qq (MMX) / pcmpeqb Vdq,Wdq (SSE2) -
3258 * packed compare bytes for equality; dispatched via the common MMX/SSE2 worker. */
3259FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
3260{
3261 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3262 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3263}
3264
3265
3266/** Opcode 0x0f 0x75. pcmpeqw Pq,Qq (MMX) / pcmpeqw Vdq,Wdq (SSE2) -
3266 * packed compare words for equality; dispatched via the common MMX/SSE2 worker. */
3267FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
3268{
3269 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3270 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3271}
3272
3273
3274/** Opcode 0x0f 0x76. pcmpeqd Pq,Qq (MMX) / pcmpeqd Vdq,Wdq (SSE2) -
3274 * packed compare doublewords for equality. (The function identifier is
3274 * misspelled 'pcmped'; kept as-is since callers reference it by this name.) */
3275FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
3276{
3277 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3278 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3279}
3280
3281
3282/** Opcode 0x0f 0x77. emms - stub, not implemented yet. */
3283FNIEMOP_STUB(iemOp_emms);
3284/** Opcode 0x0f 0x78. vmread (AMD Group 17) - decodes as invalid opcode (UD stub). */
3285FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17);
3286/** Opcode 0x0f 0x79. vmwrite - decodes as invalid opcode (UD stub). */
3287FNIEMOP_UD_STUB(iemOp_vmwrite);
3288/** Opcode 0x0f 0x7c. haddpd/haddps - stub, not implemented yet. */
3289FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);
3290/** Opcode 0x0f 0x7d. hsubpd/hsubps - stub, not implemented yet. */
3291FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
3292
3293
3294/** Opcode 0x0f 0x7e. movd/movq Ey,Pd (MMX) and movd/movq Ey,Vy (SSE2, 0x66):
3294 * store the low 32 (or, with REX.W, 64) bits of the MMX/XMM register to a
3294 * general register or memory. NOTE(review): the F3 form (movq Vq,Wq) named in
3294 * this function's identifier has no case below and falls to \#UD - presumably
3294 * not implemented yet; confirm. */
3295FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
3296{
3297 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3298 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3299 {
3300 case IEM_OP_PRF_SIZE_OP: /* SSE */
3301 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3302 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
3303 else
3304 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
3305 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3306 {
3307 /* greg, XMM */
3308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3309 IEM_MC_BEGIN(0, 1);
3310 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3311 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3312 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3313 {
3314 IEM_MC_LOCAL(uint64_t, u64Tmp);
3315 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3316 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3317 }
3318 else
3319 {
3320 IEM_MC_LOCAL(uint32_t, u32Tmp);
3321 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3322 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3323 }
3324 IEM_MC_ADVANCE_RIP();
3325 IEM_MC_END();
3326 }
3327 else
3328 {
3329 /* [mem], XMM */
3330 IEM_MC_BEGIN(0, 2);
3331 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3332 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3333 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* NOTE(review): cbImm=1 but 0x7e has no immediate byte; 0x7f passes 0 - verify (affects RIP-relative addressing). */
3334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3335 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3336 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3337 {
3338 IEM_MC_LOCAL(uint64_t, u64Tmp);
3339 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3340 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3341 }
3342 else
3343 {
3344 IEM_MC_LOCAL(uint32_t, u32Tmp);
3345 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3346 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3347 }
3348 IEM_MC_ADVANCE_RIP();
3349 IEM_MC_END();
3350 }
3351 return VINF_SUCCESS;
3352
3353 case 0: /* MMX */
3354 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3355 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3356 else
3357 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3358 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3359 {
3360 /* greg, MMX */
3361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3362 IEM_MC_BEGIN(0, 1);
3363 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3364 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3365 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3366 {
3367 IEM_MC_LOCAL(uint64_t, u64Tmp);
3368 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3369 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3370 }
3371 else
3372 {
3373 IEM_MC_LOCAL(uint32_t, u32Tmp);
3374 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3375 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3376 }
3377 IEM_MC_ADVANCE_RIP();
3378 IEM_MC_END();
3379 }
3380 else
3381 {
3382 /* [mem], MMX */
3383 IEM_MC_BEGIN(0, 2);
3384 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3385 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* NOTE(review): cbImm=1 but no immediate byte follows - see SSE case above; verify. */
3387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3388 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3389 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3390 {
3391 IEM_MC_LOCAL(uint64_t, u64Tmp);
3392 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3393 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3394 }
3395 else
3396 {
3397 IEM_MC_LOCAL(uint32_t, u32Tmp);
3398 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3399 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3400 }
3401 IEM_MC_ADVANCE_RIP();
3402 IEM_MC_END();
3403 }
3404 return VINF_SUCCESS;
3405
3406 default: /* includes F3 (movq Vq,Wq) - not handled, raises #UD */
3407 return IEMOP_RAISE_INVALID_OPCODE();
3408 }
3409}
3410
3411
3412/** Opcode 0x0f 0x7f. movq Qq,Pq (MMX) / movdqa Wdq,Vdq (0x66) / movdqu Wdq,Vdq (F3):
3412 * store the full MMX/XMM register to register or memory; the 0x66 form enforces
3412 * 16-byte alignment on the memory destination, the F3 form does not. */
3413FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
3414{
3415 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3416 bool fAligned = false;
3417 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3418 {
3419 case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
3420 fAligned = true; /* fall thru */
3421 case IEM_OP_PRF_REPZ: /* SSE unaligned */
3422 if (fAligned)
3423 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wdq,Vdq");
3424 else
3425 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wdq,Vdq");
3426 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3427 {
3428 /*
3429 * Register, register. Alignment is irrelevant here.
3430 */
3431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3432 IEM_MC_BEGIN(0, 0);
3433 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3434 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3435 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3436 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3437 IEM_MC_ADVANCE_RIP();
3438 IEM_MC_END();
3439 }
3440 else
3441 {
3442 /*
3443 * Register, memory.
3444 */
3445 IEM_MC_BEGIN(0, 2);
3446 IEM_MC_LOCAL(uint128_t, u128Tmp);
3447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3448
3449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3451 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3452 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3453
3454 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3455 if (fAligned)
3456 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3457 else
3458 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3459
3460 IEM_MC_ADVANCE_RIP();
3461 IEM_MC_END();
3462 }
3463 return VINF_SUCCESS;
3464
3465 case 0: /* MMX */
3466 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3467
3468 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3469 {
3470 /*
3471 * Register, register.
3472 */
3473 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3474 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3476 IEM_MC_BEGIN(0, 1);
3477 IEM_MC_LOCAL(uint64_t, u64Tmp);
3478 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3479 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3480 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3481 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3482 IEM_MC_ADVANCE_RIP();
3483 IEM_MC_END();
3484 }
3485 else
3486 {
3487 /*
3488 * Register, memory.
3489 */
3490 IEM_MC_BEGIN(0, 2);
3491 IEM_MC_LOCAL(uint64_t, u64Tmp);
3492 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3493
3494 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3496 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3497 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3498
3499 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3500 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3501
3502 IEM_MC_ADVANCE_RIP();
3503 IEM_MC_END();
3504 }
3505 return VINF_SUCCESS;
3506
3507 default: /* F2 prefixed form: invalid */
3508 return IEMOP_RAISE_INVALID_OPCODE();
3509 }
3510}
3511
3512
3513
3514/** Opcode 0x0f 0x80. jo Jv: relative near jump if OF=1. 386+; Jv is a signed
3514 * 16-bit or 32-bit displacement selected by the effective operand size. */
3515FNIEMOP_DEF(iemOp_jo_Jv)
3516{
3517 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
3518 IEMOP_HLP_MIN_386();
3519 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3520 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3521 {
3522 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3524
3525 IEM_MC_BEGIN(0, 0);
3526 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3527 IEM_MC_REL_JMP_S16(i16Imm);
3528 } IEM_MC_ELSE() {
3529 IEM_MC_ADVANCE_RIP();
3530 } IEM_MC_ENDIF();
3531 IEM_MC_END();
3532 }
3533 else
3534 {
3535 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3537
3538 IEM_MC_BEGIN(0, 0);
3539 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3540 IEM_MC_REL_JMP_S32(i32Imm);
3541 } IEM_MC_ELSE() {
3542 IEM_MC_ADVANCE_RIP();
3543 } IEM_MC_ENDIF();
3544 IEM_MC_END();
3545 }
3546 return VINF_SUCCESS;
3547}
3548
3549
3550/** Opcode 0x0f 0x81. jno Jv: relative near jump if OF=0. Inverted condition:
3550 * the jump is taken in the ELSE arm (flag clear). */
3551FNIEMOP_DEF(iemOp_jno_Jv)
3552{
3553 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
3554 IEMOP_HLP_MIN_386();
3555 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3556 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3557 {
3558 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3560
3561 IEM_MC_BEGIN(0, 0);
3562 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3563 IEM_MC_ADVANCE_RIP();
3564 } IEM_MC_ELSE() {
3565 IEM_MC_REL_JMP_S16(i16Imm);
3566 } IEM_MC_ENDIF();
3567 IEM_MC_END();
3568 }
3569 else
3570 {
3571 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3573
3574 IEM_MC_BEGIN(0, 0);
3575 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3576 IEM_MC_ADVANCE_RIP();
3577 } IEM_MC_ELSE() {
3578 IEM_MC_REL_JMP_S32(i32Imm);
3579 } IEM_MC_ENDIF();
3580 IEM_MC_END();
3581 }
3582 return VINF_SUCCESS;
3583}
3584
3585
3586/** Opcode 0x0f 0x82. jc/jb/jnae Jv: relative near jump if CF=1. */
3587FNIEMOP_DEF(iemOp_jc_Jv)
3588{
3589 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
3590 IEMOP_HLP_MIN_386();
3591 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3592 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3593 {
3594 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3596
3597 IEM_MC_BEGIN(0, 0);
3598 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3599 IEM_MC_REL_JMP_S16(i16Imm);
3600 } IEM_MC_ELSE() {
3601 IEM_MC_ADVANCE_RIP();
3602 } IEM_MC_ENDIF();
3603 IEM_MC_END();
3604 }
3605 else
3606 {
3607 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3609
3610 IEM_MC_BEGIN(0, 0);
3611 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3612 IEM_MC_REL_JMP_S32(i32Imm);
3613 } IEM_MC_ELSE() {
3614 IEM_MC_ADVANCE_RIP();
3615 } IEM_MC_ENDIF();
3616 IEM_MC_END();
3617 }
3618 return VINF_SUCCESS;
3619}
3620
3621
3622/** Opcode 0x0f 0x83. jnc/jnb/jae Jv: relative near jump if CF=0. Inverted
3622 * condition: the jump is taken in the ELSE arm (flag clear). */
3623FNIEMOP_DEF(iemOp_jnc_Jv)
3624{
3625 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
3626 IEMOP_HLP_MIN_386();
3627 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3628 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3629 {
3630 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3632
3633 IEM_MC_BEGIN(0, 0);
3634 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3635 IEM_MC_ADVANCE_RIP();
3636 } IEM_MC_ELSE() {
3637 IEM_MC_REL_JMP_S16(i16Imm);
3638 } IEM_MC_ENDIF();
3639 IEM_MC_END();
3640 }
3641 else
3642 {
3643 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3645
3646 IEM_MC_BEGIN(0, 0);
3647 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3648 IEM_MC_ADVANCE_RIP();
3649 } IEM_MC_ELSE() {
3650 IEM_MC_REL_JMP_S32(i32Imm);
3651 } IEM_MC_ENDIF();
3652 IEM_MC_END();
3653 }
3654 return VINF_SUCCESS;
3655}
3656
3657
3658/** Opcode 0x0f 0x84. je/jz Jv: relative near jump if ZF=1. */
3659FNIEMOP_DEF(iemOp_je_Jv)
3660{
3661 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
3662 IEMOP_HLP_MIN_386();
3663 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3664 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3665 {
3666 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3668
3669 IEM_MC_BEGIN(0, 0);
3670 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3671 IEM_MC_REL_JMP_S16(i16Imm);
3672 } IEM_MC_ELSE() {
3673 IEM_MC_ADVANCE_RIP();
3674 } IEM_MC_ENDIF();
3675 IEM_MC_END();
3676 }
3677 else
3678 {
3679 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3681
3682 IEM_MC_BEGIN(0, 0);
3683 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3684 IEM_MC_REL_JMP_S32(i32Imm);
3685 } IEM_MC_ELSE() {
3686 IEM_MC_ADVANCE_RIP();
3687 } IEM_MC_ENDIF();
3688 IEM_MC_END();
3689 }
3690 return VINF_SUCCESS;
3691}
3692
3693
3694/** Opcode 0x0f 0x85. jne/jnz Jv: relative near jump if ZF=0. Inverted
3694 * condition: the jump is taken in the ELSE arm (flag clear). */
3695FNIEMOP_DEF(iemOp_jne_Jv)
3696{
3697 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
3698 IEMOP_HLP_MIN_386();
3699 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3700 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3701 {
3702 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3704
3705 IEM_MC_BEGIN(0, 0);
3706 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3707 IEM_MC_ADVANCE_RIP();
3708 } IEM_MC_ELSE() {
3709 IEM_MC_REL_JMP_S16(i16Imm);
3710 } IEM_MC_ENDIF();
3711 IEM_MC_END();
3712 }
3713 else
3714 {
3715 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3717
3718 IEM_MC_BEGIN(0, 0);
3719 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3720 IEM_MC_ADVANCE_RIP();
3721 } IEM_MC_ELSE() {
3722 IEM_MC_REL_JMP_S32(i32Imm);
3723 } IEM_MC_ENDIF();
3724 IEM_MC_END();
3725 }
3726 return VINF_SUCCESS;
3727}
3728
3729
3730/** Opcode 0x0f 0x86. jbe/jna Jv: relative near jump if CF=1 or ZF=1. */
3731FNIEMOP_DEF(iemOp_jbe_Jv)
3732{
3733 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
3734 IEMOP_HLP_MIN_386();
3735 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3736 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3737 {
3738 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3740
3741 IEM_MC_BEGIN(0, 0);
3742 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3743 IEM_MC_REL_JMP_S16(i16Imm);
3744 } IEM_MC_ELSE() {
3745 IEM_MC_ADVANCE_RIP();
3746 } IEM_MC_ENDIF();
3747 IEM_MC_END();
3748 }
3749 else
3750 {
3751 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3753
3754 IEM_MC_BEGIN(0, 0);
3755 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3756 IEM_MC_REL_JMP_S32(i32Imm);
3757 } IEM_MC_ELSE() {
3758 IEM_MC_ADVANCE_RIP();
3759 } IEM_MC_ENDIF();
3760 IEM_MC_END();
3761 }
3762 return VINF_SUCCESS;
3763}
3764
3765
3766/** Opcode 0x0f 0x87. jnbe/ja Jv: relative near jump if CF=0 and ZF=0.
3766 * Inverted condition: the jump is taken in the ELSE arm (both flags clear). */
3767FNIEMOP_DEF(iemOp_jnbe_Jv)
3768{
3769 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
3770 IEMOP_HLP_MIN_386();
3771 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3772 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3773 {
3774 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3776
3777 IEM_MC_BEGIN(0, 0);
3778 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3779 IEM_MC_ADVANCE_RIP();
3780 } IEM_MC_ELSE() {
3781 IEM_MC_REL_JMP_S16(i16Imm);
3782 } IEM_MC_ENDIF();
3783 IEM_MC_END();
3784 }
3785 else
3786 {
3787 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3789
3790 IEM_MC_BEGIN(0, 0);
3791 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3792 IEM_MC_ADVANCE_RIP();
3793 } IEM_MC_ELSE() {
3794 IEM_MC_REL_JMP_S32(i32Imm);
3795 } IEM_MC_ENDIF();
3796 IEM_MC_END();
3797 }
3798 return VINF_SUCCESS;
3799}
3800
3801
3802/** Opcode 0x0f 0x88. js Jv: relative near jump if SF=1. */
3803FNIEMOP_DEF(iemOp_js_Jv)
3804{
3805 IEMOP_MNEMONIC(js_Jv, "js Jv");
3806 IEMOP_HLP_MIN_386();
3807 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3808 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3809 {
3810 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3812
3813 IEM_MC_BEGIN(0, 0);
3814 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3815 IEM_MC_REL_JMP_S16(i16Imm);
3816 } IEM_MC_ELSE() {
3817 IEM_MC_ADVANCE_RIP();
3818 } IEM_MC_ENDIF();
3819 IEM_MC_END();
3820 }
3821 else
3822 {
3823 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3825
3826 IEM_MC_BEGIN(0, 0);
3827 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3828 IEM_MC_REL_JMP_S32(i32Imm);
3829 } IEM_MC_ELSE() {
3830 IEM_MC_ADVANCE_RIP();
3831 } IEM_MC_ENDIF();
3832 IEM_MC_END();
3833 }
3834 return VINF_SUCCESS;
3835}
3836
3837
3838/** Opcode 0x0f 0x89. jns Jv: relative near jump if SF=0. Inverted condition:
3838 * the jump is taken in the ELSE arm (flag clear). */
3839FNIEMOP_DEF(iemOp_jns_Jv)
3840{
3841 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
3842 IEMOP_HLP_MIN_386();
3843 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3844 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3845 {
3846 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3847 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3848
3849 IEM_MC_BEGIN(0, 0);
3850 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3851 IEM_MC_ADVANCE_RIP();
3852 } IEM_MC_ELSE() {
3853 IEM_MC_REL_JMP_S16(i16Imm);
3854 } IEM_MC_ENDIF();
3855 IEM_MC_END();
3856 }
3857 else
3858 {
3859 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3861
3862 IEM_MC_BEGIN(0, 0);
3863 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3864 IEM_MC_ADVANCE_RIP();
3865 } IEM_MC_ELSE() {
3866 IEM_MC_REL_JMP_S32(i32Imm);
3867 } IEM_MC_ENDIF();
3868 IEM_MC_END();
3869 }
3870 return VINF_SUCCESS;
3871}
3872
3873
3874/** Opcode 0x0f 0x8a. jp/jpe Jv: relative near jump if PF=1. */
3875FNIEMOP_DEF(iemOp_jp_Jv)
3876{
3877 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
3878 IEMOP_HLP_MIN_386();
3879 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3880 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3881 {
3882 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3884
3885 IEM_MC_BEGIN(0, 0);
3886 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3887 IEM_MC_REL_JMP_S16(i16Imm);
3888 } IEM_MC_ELSE() {
3889 IEM_MC_ADVANCE_RIP();
3890 } IEM_MC_ENDIF();
3891 IEM_MC_END();
3892 }
3893 else
3894 {
3895 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3897
3898 IEM_MC_BEGIN(0, 0);
3899 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3900 IEM_MC_REL_JMP_S32(i32Imm);
3901 } IEM_MC_ELSE() {
3902 IEM_MC_ADVANCE_RIP();
3903 } IEM_MC_ENDIF();
3904 IEM_MC_END();
3905 }
3906 return VINF_SUCCESS;
3907}
3908
3909
3910/** Opcode 0x0f 0x8b. jnp/jpo Jv: relative near jump if PF=0. Inverted
3910 * condition: the jump is taken in the ELSE arm (flag clear). */
3911FNIEMOP_DEF(iemOp_jnp_Jv)
3912{
3913 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
3914 IEMOP_HLP_MIN_386();
3915 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3916 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3917 {
3918 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3920
3921 IEM_MC_BEGIN(0, 0);
3922 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3923 IEM_MC_ADVANCE_RIP();
3924 } IEM_MC_ELSE() {
3925 IEM_MC_REL_JMP_S16(i16Imm);
3926 } IEM_MC_ENDIF();
3927 IEM_MC_END();
3928 }
3929 else
3930 {
3931 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3933
3934 IEM_MC_BEGIN(0, 0);
3935 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3936 IEM_MC_ADVANCE_RIP();
3937 } IEM_MC_ELSE() {
3938 IEM_MC_REL_JMP_S32(i32Imm);
3939 } IEM_MC_ENDIF();
3940 IEM_MC_END();
3941 }
3942 return VINF_SUCCESS;
3943}
3944
3945
3946/** Opcode 0x0f 0x8c. jl/jnge Jv: relative near jump if SF != OF (signed less). */
3947FNIEMOP_DEF(iemOp_jl_Jv)
3948{
3949 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
3950 IEMOP_HLP_MIN_386();
3951 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3952 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3953 {
3954 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3956
3957 IEM_MC_BEGIN(0, 0);
3958 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3959 IEM_MC_REL_JMP_S16(i16Imm);
3960 } IEM_MC_ELSE() {
3961 IEM_MC_ADVANCE_RIP();
3962 } IEM_MC_ENDIF();
3963 IEM_MC_END();
3964 }
3965 else
3966 {
3967 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3969
3970 IEM_MC_BEGIN(0, 0);
3971 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3972 IEM_MC_REL_JMP_S32(i32Imm);
3973 } IEM_MC_ELSE() {
3974 IEM_MC_ADVANCE_RIP();
3975 } IEM_MC_ENDIF();
3976 IEM_MC_END();
3977 }
3978 return VINF_SUCCESS;
3979}
3980
3981
3982/** Opcode 0x0f 0x8d. jnl/jge Jv: relative near jump if SF == OF (signed
3982 * greater-or-equal). Inverted condition: the jump is taken in the ELSE arm. */
3983FNIEMOP_DEF(iemOp_jnl_Jv)
3984{
3985 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
3986 IEMOP_HLP_MIN_386();
3987 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3988 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3989 {
3990 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3992
3993 IEM_MC_BEGIN(0, 0);
3994 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3995 IEM_MC_ADVANCE_RIP();
3996 } IEM_MC_ELSE() {
3997 IEM_MC_REL_JMP_S16(i16Imm);
3998 } IEM_MC_ENDIF();
3999 IEM_MC_END();
4000 }
4001 else
4002 {
4003 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4005
4006 IEM_MC_BEGIN(0, 0);
4007 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4008 IEM_MC_ADVANCE_RIP();
4009 } IEM_MC_ELSE() {
4010 IEM_MC_REL_JMP_S32(i32Imm);
4011 } IEM_MC_ENDIF();
4012 IEM_MC_END();
4013 }
4014 return VINF_SUCCESS;
4015}
4016
4017
4018/** Opcode 0x0f 0x8e. jle/jng Jv: relative near jump if ZF=1 or SF != OF
4018 * (signed less-or-equal). */
4019FNIEMOP_DEF(iemOp_jle_Jv)
4020{
4021 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4022 IEMOP_HLP_MIN_386();
4023 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4024 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4025 {
4026 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4028
4029 IEM_MC_BEGIN(0, 0);
4030 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4031 IEM_MC_REL_JMP_S16(i16Imm);
4032 } IEM_MC_ELSE() {
4033 IEM_MC_ADVANCE_RIP();
4034 } IEM_MC_ENDIF();
4035 IEM_MC_END();
4036 }
4037 else
4038 {
4039 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4041
4042 IEM_MC_BEGIN(0, 0);
4043 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4044 IEM_MC_REL_JMP_S32(i32Imm);
4045 } IEM_MC_ELSE() {
4046 IEM_MC_ADVANCE_RIP();
4047 } IEM_MC_ENDIF();
4048 IEM_MC_END();
4049 }
4050 return VINF_SUCCESS;
4051}
4052
4053
/** Opcode 0x0f 0x8f. */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    /* jnle/jg Jv: near conditional jump, taken when ZF is clear and SF == OF
       (signed greater).  The condition macro tests the *negation*
       (ZF || SF != OF), so the taken jump sits in the ELSE branch. */
    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
    IEMOP_HLP_MIN_386();                /* Jcc with full-size displacement first appeared on the 386. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* 64-bit mode forces 64-bit operand size (displacement stays 32-bit). */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();       /* ZF || SF != OF: condition false, fall through. */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm); /* !ZF && SF == OF: take the jump. */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();       /* condition false, fall through. */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm); /* condition true, take the jump. */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4088
4089
/** Opcode 0x0f 0x90. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    /* seto Eb: set the byte r/m operand to 1 if the overflow flag (OF) is
       set, to 0 otherwise. */
    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
    IEMOP_HLP_MIN_386();                /* SETcc was introduced with the 386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* after EA calc - the displacement bytes are part of the instruction. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4130
4131
/** Opcode 0x0f 0x91. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    /* setno Eb: set the byte r/m operand to 1 if the overflow flag (OF) is
       clear, to 0 otherwise.  Note the inverted store values in the IF/ELSE. */
    IEMOP_MNEMONIC(setno_Eb, "setno Eb");
    IEMOP_HLP_MIN_386();                /* SETcc was introduced with the 386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* after EA calc - the displacement bytes are part of the instruction. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4172
4173
/** Opcode 0x0f 0x92. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    /* setc/setb/setnae Eb: set the byte r/m operand to 1 if the carry flag
       (CF) is set, to 0 otherwise. */
    IEMOP_MNEMONIC(setc_Eb, "setc Eb");
    IEMOP_HLP_MIN_386();                /* SETcc was introduced with the 386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* after EA calc - the displacement bytes are part of the instruction. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4214
4215
/** Opcode 0x0f 0x93. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    /* setnc/setnb/setae Eb: set the byte r/m operand to 1 if the carry flag
       (CF) is clear, to 0 otherwise.  Note the inverted store values. */
    IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
    IEMOP_HLP_MIN_386();                /* SETcc was introduced with the 386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* after EA calc - the displacement bytes are part of the instruction. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4256
4257
/** Opcode 0x0f 0x94. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    /* sete/setz Eb: set the byte r/m operand to 1 if the zero flag (ZF) is
       set, to 0 otherwise. */
    IEMOP_MNEMONIC(sete_Eb, "sete Eb");
    IEMOP_HLP_MIN_386();                /* SETcc was introduced with the 386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* after EA calc - the displacement bytes are part of the instruction. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4298
4299
/** Opcode 0x0f 0x95. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    /* setne/setnz Eb: set the byte r/m operand to 1 if the zero flag (ZF) is
       clear, to 0 otherwise.  Note the inverted store values. */
    IEMOP_MNEMONIC(setne_Eb, "setne Eb");
    IEMOP_HLP_MIN_386();                /* SETcc was introduced with the 386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* after EA calc - the displacement bytes are part of the instruction. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4340
4341
/** Opcode 0x0f 0x96. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    /* setbe/setna Eb: set the byte r/m operand to 1 if CF or ZF is set
       (unsigned below-or-equal), to 0 otherwise. */
    IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
    IEMOP_HLP_MIN_386();                /* SETcc was introduced with the 386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* after EA calc - the displacement bytes are part of the instruction. */
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4382
4383
/** Opcode 0x0f 0x97. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    /* setnbe/seta Eb: set the byte r/m operand to 1 if neither CF nor ZF is
       set (unsigned above), to 0 otherwise.  Note the inverted store values. */
    IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
    IEMOP_HLP_MIN_386();                /* SETcc was introduced with the 386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* after EA calc - the displacement bytes are part of the instruction. */
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4424
4425
/** Opcode 0x0f 0x98. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    /* sets Eb: set the byte r/m operand to 1 if the sign flag (SF) is set,
       to 0 otherwise. */
    IEMOP_MNEMONIC(sets_Eb, "sets Eb");
    IEMOP_HLP_MIN_386();                /* SETcc was introduced with the 386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* after EA calc - the displacement bytes are part of the instruction. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4466
4467
/** Opcode 0x0f 0x99. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    /* setns Eb: set the byte r/m operand to 1 if the sign flag (SF) is
       clear, to 0 otherwise.  Note the inverted store values. */
    IEMOP_MNEMONIC(setns_Eb, "setns Eb");
    IEMOP_HLP_MIN_386();                /* SETcc was introduced with the 386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* after EA calc - the displacement bytes are part of the instruction. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4508
4509
/** Opcode 0x0f 0x9a. */
FNIEMOP_DEF(iemOp_setp_Eb)
{
    /* setp/setpe Eb: set the byte r/m operand to 1 if the parity flag (PF)
       is set, to 0 otherwise. */
    IEMOP_MNEMONIC(setp_Eb, "setp Eb");
    IEMOP_HLP_MIN_386();                /* SETcc was introduced with the 386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* after EA calc - the displacement bytes are part of the instruction. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4550
4551
/** Opcode 0x0f 0x9b. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    /* setnp/setpo Eb: set the byte r/m operand to 1 if the parity flag (PF)
       is clear, to 0 otherwise.  Note the inverted store values. */
    IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
    IEMOP_HLP_MIN_386();                /* SETcc was introduced with the 386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* after EA calc - the displacement bytes are part of the instruction. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4592
4593
/** Opcode 0x0f 0x9c. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    /* setl/setnge Eb: set the byte r/m operand to 1 if SF != OF
       (signed less), to 0 otherwise. */
    IEMOP_MNEMONIC(setl_Eb, "setl Eb");
    IEMOP_HLP_MIN_386();                /* SETcc was introduced with the 386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* after EA calc - the displacement bytes are part of the instruction. */
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4634
4635
/** Opcode 0x0f 0x9d. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    /* setnl/setge Eb: set the byte r/m operand to 1 if SF == OF
       (signed greater-or-equal), to 0 otherwise.  The condition macro tests
       SF != OF, hence the inverted store values. */
    IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
    IEMOP_HLP_MIN_386();                /* SETcc was introduced with the 386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* after EA calc - the displacement bytes are part of the instruction. */
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4676
4677
/** Opcode 0x0f 0x9e. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    /* setle/setng Eb: set the byte r/m operand to 1 if ZF is set or
       SF != OF (signed less-or-equal), to 0 otherwise. */
    IEMOP_MNEMONIC(setle_Eb, "setle Eb");
    IEMOP_HLP_MIN_386();                /* SETcc was introduced with the 386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* after EA calc - the displacement bytes are part of the instruction. */
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4718
4719
/** Opcode 0x0f 0x9f. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    /* setnle/setg Eb: set the byte r/m operand to 1 if ZF is clear and
       SF == OF (signed greater), to 0 otherwise.  The condition macro tests
       the negation (ZF || SF != OF), hence the inverted store values. */
    IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
    IEMOP_HLP_MIN_386();                /* SETcc was introduced with the 386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* after EA calc - the displacement bytes are part of the instruction. */
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4760
4761
/**
 * Common 'push segment-register' helper.
 *
 * Pushes the value of the given segment register using the current effective
 * operand size.
 *
 * @param   iReg    The segment register to push (X86_SREG_XXX).
 */
FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (iReg < X86_SREG_FS)
        IEMOP_HLP_NO_64BIT();           /* push es/cs/ss/ds raise #UD in 64-bit mode; fs/gs remain valid. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* in 64-bit mode the push defaults to a 64-bit operation. */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
            /* Dedicated push variant for segment registers - presumably models
               the CPUs' special treatment of the 32-bit sreg push; see the
               IEM_MC_PUSH_U32_SREG implementation for details. */
            IEM_MC_PUSH_U32_SREG(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);   /* zero-extended to 64 bits. */
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
4804
4805
/** Opcode 0x0f 0xa0. */
FNIEMOP_DEF(iemOp_push_fs)
{
    /* push fs - defers to the common segment-register push helper. */
    IEMOP_MNEMONIC(push_fs, "push fs");
    IEMOP_HLP_MIN_386();
    /* NOTE(review): the lock-prefix check is repeated inside
       iemOpCommonPushSReg; redundant but looks harmless. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
4814
4815
/** Opcode 0x0f 0xa1. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    /* pop fs - deferred to a C implementation since loading a segment
       register involves descriptor-table access and exception checks. */
    IEMOP_MNEMONIC(pop_fs, "pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
}
4824
4825
/** Opcode 0x0f 0xa2. */
FNIEMOP_DEF(iemOp_cpuid)
{
    /* cpuid - deferred to a C implementation that consults the VM's CPUID
       configuration. */
    IEMOP_MNEMONIC(cpuid, "cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
4834
4835
4836/**
4837 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4838 * iemOp_bts_Ev_Gv.
4839 */
4840FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4841{
4842 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4843 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4844
4845 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4846 {
4847 /* register destination. */
4848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4849 switch (pVCpu->iem.s.enmEffOpSize)
4850 {
4851 case IEMMODE_16BIT:
4852 IEM_MC_BEGIN(3, 0);
4853 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4854 IEM_MC_ARG(uint16_t, u16Src, 1);
4855 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4856
4857 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4858 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4859 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4860 IEM_MC_REF_EFLAGS(pEFlags);
4861 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4862
4863 IEM_MC_ADVANCE_RIP();
4864 IEM_MC_END();
4865 return VINF_SUCCESS;
4866
4867 case IEMMODE_32BIT:
4868 IEM_MC_BEGIN(3, 0);
4869 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4870 IEM_MC_ARG(uint32_t, u32Src, 1);
4871 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4872
4873 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4874 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4875 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4876 IEM_MC_REF_EFLAGS(pEFlags);
4877 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4878
4879 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4880 IEM_MC_ADVANCE_RIP();
4881 IEM_MC_END();
4882 return VINF_SUCCESS;
4883
4884 case IEMMODE_64BIT:
4885 IEM_MC_BEGIN(3, 0);
4886 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4887 IEM_MC_ARG(uint64_t, u64Src, 1);
4888 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4889
4890 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4891 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4892 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4893 IEM_MC_REF_EFLAGS(pEFlags);
4894 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4895
4896 IEM_MC_ADVANCE_RIP();
4897 IEM_MC_END();
4898 return VINF_SUCCESS;
4899
4900 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4901 }
4902 }
4903 else
4904 {
4905 /* memory destination. */
4906
4907 uint32_t fAccess;
4908 if (pImpl->pfnLockedU16)
4909 fAccess = IEM_ACCESS_DATA_RW;
4910 else /* BT */
4911 fAccess = IEM_ACCESS_DATA_R;
4912
4913 /** @todo test negative bit offsets! */
4914 switch (pVCpu->iem.s.enmEffOpSize)
4915 {
4916 case IEMMODE_16BIT:
4917 IEM_MC_BEGIN(3, 2);
4918 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4919 IEM_MC_ARG(uint16_t, u16Src, 1);
4920 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4921 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4922 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4923
4924 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4925 if (pImpl->pfnLockedU16)
4926 IEMOP_HLP_DONE_DECODING();
4927 else
4928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4929 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4930 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4931 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4932 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4933 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 1);
4934 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4935 IEM_MC_FETCH_EFLAGS(EFlags);
4936
4937 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4938 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
4939 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4940 else
4941 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4942 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
4943
4944 IEM_MC_COMMIT_EFLAGS(EFlags);
4945 IEM_MC_ADVANCE_RIP();
4946 IEM_MC_END();
4947 return VINF_SUCCESS;
4948
4949 case IEMMODE_32BIT:
4950 IEM_MC_BEGIN(3, 2);
4951 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4952 IEM_MC_ARG(uint32_t, u32Src, 1);
4953 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4954 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4955 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4956
4957 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4958 if (pImpl->pfnLockedU16)
4959 IEMOP_HLP_DONE_DECODING();
4960 else
4961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4962 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4963 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4964 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4965 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4966 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4967 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4968 IEM_MC_FETCH_EFLAGS(EFlags);
4969
4970 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4971 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
4972 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4973 else
4974 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4975 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
4976
4977 IEM_MC_COMMIT_EFLAGS(EFlags);
4978 IEM_MC_ADVANCE_RIP();
4979 IEM_MC_END();
4980 return VINF_SUCCESS;
4981
4982 case IEMMODE_64BIT:
4983 IEM_MC_BEGIN(3, 2);
4984 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4985 IEM_MC_ARG(uint64_t, u64Src, 1);
4986 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4987 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4988 IEM_MC_LOCAL(int64_t, i64AddrAdj);
4989
4990 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4991 if (pImpl->pfnLockedU16)
4992 IEMOP_HLP_DONE_DECODING();
4993 else
4994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4995 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4996 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
4997 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
4998 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
4999 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5000 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5001 IEM_MC_FETCH_EFLAGS(EFlags);
5002
5003 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5004 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5005 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5006 else
5007 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5008 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5009
5010 IEM_MC_COMMIT_EFLAGS(EFlags);
5011 IEM_MC_ADVANCE_RIP();
5012 IEM_MC_END();
5013 return VINF_SUCCESS;
5014
5015 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5016 }
5017 }
5018}
5019
5020
/** Opcode 0x0f 0xa3. */
FNIEMOP_DEF(iemOp_bt_Ev_Gv)
{
    IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
    IEMOP_HLP_MIN_386();   /* BT was introduced with the 80386. */
    /* Defer to the common bit-test worker with the BT (test only) implementation. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
}
5028
5029
5030/**
5031 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5032 */
5033FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5034{
5035 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5036 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5037
5038 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5039 {
5040 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5042
5043 switch (pVCpu->iem.s.enmEffOpSize)
5044 {
5045 case IEMMODE_16BIT:
5046 IEM_MC_BEGIN(4, 0);
5047 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5048 IEM_MC_ARG(uint16_t, u16Src, 1);
5049 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5050 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5051
5052 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5053 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5054 IEM_MC_REF_EFLAGS(pEFlags);
5055 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5056
5057 IEM_MC_ADVANCE_RIP();
5058 IEM_MC_END();
5059 return VINF_SUCCESS;
5060
5061 case IEMMODE_32BIT:
5062 IEM_MC_BEGIN(4, 0);
5063 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5064 IEM_MC_ARG(uint32_t, u32Src, 1);
5065 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5066 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5067
5068 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5069 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5070 IEM_MC_REF_EFLAGS(pEFlags);
5071 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5072
5073 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5074 IEM_MC_ADVANCE_RIP();
5075 IEM_MC_END();
5076 return VINF_SUCCESS;
5077
5078 case IEMMODE_64BIT:
5079 IEM_MC_BEGIN(4, 0);
5080 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5081 IEM_MC_ARG(uint64_t, u64Src, 1);
5082 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5083 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5084
5085 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5086 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5087 IEM_MC_REF_EFLAGS(pEFlags);
5088 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5089
5090 IEM_MC_ADVANCE_RIP();
5091 IEM_MC_END();
5092 return VINF_SUCCESS;
5093
5094 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5095 }
5096 }
5097 else
5098 {
5099 switch (pVCpu->iem.s.enmEffOpSize)
5100 {
5101 case IEMMODE_16BIT:
5102 IEM_MC_BEGIN(4, 2);
5103 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5104 IEM_MC_ARG(uint16_t, u16Src, 1);
5105 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5106 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5107 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5108
5109 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5110 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5111 IEM_MC_ASSIGN(cShiftArg, cShift);
5112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5113 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5114 IEM_MC_FETCH_EFLAGS(EFlags);
5115 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5116 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5117
5118 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5119 IEM_MC_COMMIT_EFLAGS(EFlags);
5120 IEM_MC_ADVANCE_RIP();
5121 IEM_MC_END();
5122 return VINF_SUCCESS;
5123
5124 case IEMMODE_32BIT:
5125 IEM_MC_BEGIN(4, 2);
5126 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5127 IEM_MC_ARG(uint32_t, u32Src, 1);
5128 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5129 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5130 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5131
5132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5133 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5134 IEM_MC_ASSIGN(cShiftArg, cShift);
5135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5136 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5137 IEM_MC_FETCH_EFLAGS(EFlags);
5138 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5139 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5140
5141 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5142 IEM_MC_COMMIT_EFLAGS(EFlags);
5143 IEM_MC_ADVANCE_RIP();
5144 IEM_MC_END();
5145 return VINF_SUCCESS;
5146
5147 case IEMMODE_64BIT:
5148 IEM_MC_BEGIN(4, 2);
5149 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5150 IEM_MC_ARG(uint64_t, u64Src, 1);
5151 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5152 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5153 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5154
5155 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5156 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5157 IEM_MC_ASSIGN(cShiftArg, cShift);
5158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5159 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5160 IEM_MC_FETCH_EFLAGS(EFlags);
5161 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5162 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5163
5164 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5165 IEM_MC_COMMIT_EFLAGS(EFlags);
5166 IEM_MC_ADVANCE_RIP();
5167 IEM_MC_END();
5168 return VINF_SUCCESS;
5169
5170 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5171 }
5172 }
5173}
5174
5175
5176/**
5177 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5178 */
5179FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5180{
5181 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5182 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5183
5184 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5185 {
5186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5187
5188 switch (pVCpu->iem.s.enmEffOpSize)
5189 {
5190 case IEMMODE_16BIT:
5191 IEM_MC_BEGIN(4, 0);
5192 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5193 IEM_MC_ARG(uint16_t, u16Src, 1);
5194 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5195 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5196
5197 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5198 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5199 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5200 IEM_MC_REF_EFLAGS(pEFlags);
5201 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5202
5203 IEM_MC_ADVANCE_RIP();
5204 IEM_MC_END();
5205 return VINF_SUCCESS;
5206
5207 case IEMMODE_32BIT:
5208 IEM_MC_BEGIN(4, 0);
5209 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5210 IEM_MC_ARG(uint32_t, u32Src, 1);
5211 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5212 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5213
5214 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5215 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5216 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5217 IEM_MC_REF_EFLAGS(pEFlags);
5218 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5219
5220 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5221 IEM_MC_ADVANCE_RIP();
5222 IEM_MC_END();
5223 return VINF_SUCCESS;
5224
5225 case IEMMODE_64BIT:
5226 IEM_MC_BEGIN(4, 0);
5227 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5228 IEM_MC_ARG(uint64_t, u64Src, 1);
5229 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5230 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5231
5232 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5233 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5234 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5235 IEM_MC_REF_EFLAGS(pEFlags);
5236 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5237
5238 IEM_MC_ADVANCE_RIP();
5239 IEM_MC_END();
5240 return VINF_SUCCESS;
5241
5242 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5243 }
5244 }
5245 else
5246 {
5247 switch (pVCpu->iem.s.enmEffOpSize)
5248 {
5249 case IEMMODE_16BIT:
5250 IEM_MC_BEGIN(4, 2);
5251 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5252 IEM_MC_ARG(uint16_t, u16Src, 1);
5253 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5254 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5255 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5256
5257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5259 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5260 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5261 IEM_MC_FETCH_EFLAGS(EFlags);
5262 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5263 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5264
5265 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5266 IEM_MC_COMMIT_EFLAGS(EFlags);
5267 IEM_MC_ADVANCE_RIP();
5268 IEM_MC_END();
5269 return VINF_SUCCESS;
5270
5271 case IEMMODE_32BIT:
5272 IEM_MC_BEGIN(4, 2);
5273 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5274 IEM_MC_ARG(uint32_t, u32Src, 1);
5275 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5276 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5277 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5278
5279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5281 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5282 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5283 IEM_MC_FETCH_EFLAGS(EFlags);
5284 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5285 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5286
5287 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5288 IEM_MC_COMMIT_EFLAGS(EFlags);
5289 IEM_MC_ADVANCE_RIP();
5290 IEM_MC_END();
5291 return VINF_SUCCESS;
5292
5293 case IEMMODE_64BIT:
5294 IEM_MC_BEGIN(4, 2);
5295 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5296 IEM_MC_ARG(uint64_t, u64Src, 1);
5297 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5298 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5299 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5300
5301 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5303 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5304 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5305 IEM_MC_FETCH_EFLAGS(EFlags);
5306 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5307 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5308
5309 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5310 IEM_MC_COMMIT_EFLAGS(EFlags);
5311 IEM_MC_ADVANCE_RIP();
5312 IEM_MC_END();
5313 return VINF_SUCCESS;
5314
5315 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5316 }
5317 }
5318}
5319
5320
5321
/** Opcode 0x0f 0xa4. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();   /* SHLD was introduced with the 80386. */
    /* Common imm8-count double-shift worker with the SHLD implementation. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}
5329
5330
/** Opcode 0x0f 0xa5. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386();   /* SHLD was introduced with the 80386. */
    /* Common CL-count double-shift worker with the SHLD implementation. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}
5338
5339
/** Opcode 0x0f 0xa8. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC(push_gs, "push gs");
    IEMOP_HLP_MIN_386();   /* GS exists only since the 80386. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Common segment-register push worker does the actual stack write. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
5348
5349
/** Opcode 0x0f 0xa9. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC(pop_gs, "pop gs");
    IEMOP_HLP_MIN_386();   /* GS exists only since the 80386. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Segment loads can fault/reschedule, so defer to the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
}
5358
5359
/** Opcode 0x0f 0xaa.  RSM (resume from SMM) - not implemented yet. */
FNIEMOP_STUB(iemOp_rsm);
//IEMOP_HLP_MIN_386();
5363
5364
/** Opcode 0x0f 0xab. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
    IEMOP_HLP_MIN_386();   /* BTS was introduced with the 80386. */
    /* Defer to the common bit-test worker with the BTS (test-and-set) implementation. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
5372
5373
/** Opcode 0x0f 0xac. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();   /* SHRD was introduced with the 80386. */
    /* Common imm8-count double-shift worker with the SHRD implementation. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
5381
5382
/** Opcode 0x0f 0xad. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386();   /* SHRD was introduced with the 80386. */
    /* Common CL-count double-shift worker with the SHRD implementation. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
5390
5391
/** Opcode 0x0f 0xae mem/0.
 *
 * FXSAVE m512 - save x87/MMX/SSE state to a 512-byte memory area.
 * Raises \#UD when the guest CPU profile lacks FXSAVE/FXRSTOR support;
 * all further checks and the actual save are done by iemCImpl_fxsave.
 */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxsave, "fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5410
5411
/** Opcode 0x0f 0xae mem/1.
 *
 * FXRSTOR m512 - restore x87/MMX/SSE state from a 512-byte memory area.
 * Raises \#UD when the guest CPU profile lacks FXSAVE/FXRSTOR support;
 * all further checks and the actual restore are done by iemCImpl_fxrstor.
 */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5430
5431
/* Remaining Grp15 memory-form encodings - not implemented yet (FNIEMOP_STUB_1)
   or decode to #UD (FNIEMOP_UD_STUB_1). */

/** Opcode 0x0f 0xae mem/2.  LDMXCSR - stub. */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3.  STMXCSR - stub. */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4.  XSAVE - raises \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5.  XRSTOR - raises \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6.  XSAVEOPT - raises \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7.  CLFLUSH - stub. */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5449
5450
/** Opcode 0x0f 0xae 11b/5.
 *
 * LFENCE.  Raises \#UD when the guest CPU profile lacks SSE2.  Uses the real
 * lfence instruction when the host has SSE2, otherwise an alternative memory
 * fence implementation.
 */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);   /* mod=11 r/m bits are don't-care for fences */
    IEMOP_MNEMONIC(lfence, "lfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5469
5470
/** Opcode 0x0f 0xae 11b/6.
 *
 * MFENCE.  Raises \#UD when the guest CPU profile lacks SSE2.  Uses the real
 * mfence instruction when the host has SSE2, otherwise an alternative memory
 * fence implementation.
 */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);   /* mod=11 r/m bits are don't-care for fences */
    IEMOP_MNEMONIC(mfence, "mfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5489
5490
/** Opcode 0x0f 0xae 11b/7.
 *
 * SFENCE.  Raises \#UD when the guest CPU profile lacks SSE2.  Uses the real
 * sfence instruction when the host has SSE2, otherwise an alternative memory
 * fence implementation.
 */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);   /* mod=11 r/m bits are don't-care for fences */
    IEMOP_MNEMONIC(sfence, "sfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5509
5510
/* FSGSBASE encodings (F3 prefix, mod=11) - decode to #UD for now. */

/** Opcode 0xf3 0x0f 0xae 11b/0.  RDFSBASE - raises \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1.  RDGSBASE - raises \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2.  WRFSBASE - raises \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3.  WRGSBASE - raises \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5522
5523
/** Opcode 0x0f 0xae.
 *
 * Group 15 dispatcher.  Memory forms dispatch purely on the ModRM reg field
 * (fxsave/fxrstor/ldmxcsr/stmxcsr/xsave/xrstor/xsaveopt/clflush); register
 * forms additionally dispatch on the repeat/operand-size/lock prefixes:
 * no prefix selects the fences (reg 5-7), F3 selects the FSGSBASE group,
 * and every other prefix combination raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp15)
{
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Memory forms: reg field selects the instruction. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Register forms: prefix bytes select the sub-group (LOCK included
           in the mask so a locked encoding falls through to #UD). */
        switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0:
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all cases return */

            case IEM_OP_PRF_REPZ:
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all cases return */

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}
5583
5584
/** Opcode 0x0f 0xaf. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
    IEMOP_HLP_MIN_386();   /* Two-operand IMUL was introduced with the 80386. */
    /* SF, ZF, AF and PF are architecturally undefined after IMUL. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    /* Defer to the common reg,reg/mem binary worker with the two-operand IMUL impl. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
5593
5594
/** Opcode 0x0f 0xb0.
 *
 * CMPXCHG Eb,Gb: compare AL with the destination; if equal store the source
 * byte in the destination, otherwise load the destination into AL.  The
 * comparison/exchange itself is done by the iemAImpl_cmpxchg_u8* workers,
 * which also update EFLAGS and (via pu8Al) the AL value.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486();   /* CMPXCHG was introduced with the 80486. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register destination: AL and the destination register are passed
         * by reference so the worker can update them directly.
         */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory destination: AL is snapshotted into a local, the worker
         * operates on the mapped memory and the local, then both memory
         * and AL are committed afterwards.
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);  /* write back possibly-updated AL */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5653
/** Opcode 0x0f 0xb1.
 *
 * CMPXCHG Ev,Gv: compare rAX with the destination; if equal store the source
 * in the destination, otherwise load the destination into rAX.  Dispatches
 * on the effective operand size; the iemAImpl_cmpxchg_u16/u32/u64* workers
 * do the compare/exchange and update EFLAGS and (via the pu*Ax reference)
 * the accumulator.  On 32-bit (x86) hosts the 64-bit source operand is
 * passed by reference instead of by value (see the RT_ARCH_X86 sections).
 */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486();   /* CMPXCHG was introduced with the 80486. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register destination: destination and accumulator are referenced
         * directly, so the worker updates them in place.
         */
        IEMOP_HLP_DONE_DECODING();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* 32-bit GPR writes zero the upper halves of both registers. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);   /* by-ref on 32-bit hosts */
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * Memory destination: rAX is snapshotted into a local, the worker
         * operates on mapped memory + the local, and memory, EFLAGS and the
         * accumulator are committed afterwards.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);  /* write back possibly-updated AX */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);  /* write back possibly-updated EAX */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);   /* by-ref on 32-bit hosts */
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);  /* write back possibly-updated RAX */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5847
5848
/**
 * Common worker for LSS/LFS/LGS (and friends): load a far pointer from memory
 * into @a iSegReg and the ModRM reg-field general register.
 *
 * Fetches the offset part first, then the 16-bit selector that follows it at
 * displacement 2/4/8 (by operand size), and defers the actual segment +
 * register load to iemCImpl_load_SReg_Greg.
 *
 * @param   iSegReg     The segment register to load (X86_SREG_XXX).
 * @param   bRm         The ModRM byte; must be a memory form (caller checks).
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);  /* selector after 16-bit offset */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);  /* selector after 32-bit offset */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);  /* selector after 64-bit offset */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5910
5911
5912/** Opcode 0x0f 0xb2. */
5913FNIEMOP_DEF(iemOp_lss_Gv_Mp)
5914{
5915 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
5916 IEMOP_HLP_MIN_386();
5917 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5918 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5919 return IEMOP_RAISE_INVALID_OPCODE();
5920 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
5921}
5922
5923
5924/** Opcode 0x0f 0xb3. */
5925FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5926{
5927 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
5928 IEMOP_HLP_MIN_386();
5929 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5930}
5931
5932
5933/** Opcode 0x0f 0xb4. */
5934FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
5935{
5936 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
5937 IEMOP_HLP_MIN_386();
5938 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5939 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5940 return IEMOP_RAISE_INVALID_OPCODE();
5941 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
5942}
5943
5944
5945/** Opcode 0x0f 0xb5. */
5946FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
5947{
5948 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
5949 IEMOP_HLP_MIN_386();
5950 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5951 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5952 return IEMOP_RAISE_INVALID_OPCODE();
5953 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
5954}
5955
5956
/** Opcode 0x0f 0xb6 - movzx Gv,Eb: zero-extend a byte operand into a
 *  16/32/64-bit general register selected by the effective operand size. */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                /* Byte register zero-extended into a 16-bit register. */
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                /* Byte register zero-extended into a 32-bit register. */
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                /* Byte register zero-extended into a 64-bit register. */
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                /* Byte from memory zero-extended into a 16-bit register. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                /* Byte from memory zero-extended into a 32-bit register. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                /* Byte from memory zero-extended into a 64-bit register. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6050
6051
/** Opcode 0x0f 0xb7 - movzx Gv,Ew: zero-extend a word operand into a
 *  32-bit register (non-64-bit effective operand size) or a 64-bit one. */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            /* 16- and 32-bit operand sizes both store a zero-extended dword. */
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* REX.W: word register zero-extended into a 64-bit register. */
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6120
6121
/** Opcode 0x0f 0xb8. */
/* Not implemented yet; FNIEMOP_STUB expands to a placeholder decoder body. */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
6124
6125
/** Opcode 0x0f 0xb9 - group 10 (UD1): always raises \#UD. */
FNIEMOP_DEF(iemOp_Grp10)
{
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
6132
6133
/** Opcode 0x0f 0xba - group 8: bt/bts/btr/btc Ev,Ib (immediate bit offset).
 *  Reg field selects the operation; /0../3 are invalid.  The immediate bit
 *  offset is masked to the operand width (0x0f/0x1f/0x3f), so unlike the Gv
 *  forms there is no displacement beyond the addressed operand. */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 2: case 3:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT only reads the destination; bts/btr/btc (with locked variants) read-modify-write it. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Note: one immediate byte still to come, hence the cbImm=1 argument. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

}
6305
6306
6307/** Opcode 0x0f 0xbb. */
6308FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6309{
6310 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
6311 IEMOP_HLP_MIN_386();
6312 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6313}
6314
6315
6316/** Opcode 0x0f 0xbc. */
6317FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6318{
6319 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
6320 IEMOP_HLP_MIN_386();
6321 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6322 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6323}
6324
6325
6326/** Opcode 0x0f 0xbd. */
6327FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6328{
6329 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
6330 IEMOP_HLP_MIN_386();
6331 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6332 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6333}
6334
6335
/** Opcode 0x0f 0xbe - movsx Gv,Eb: sign-extend a byte operand into a
 *  16/32/64-bit general register selected by the effective operand size. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                /* Byte register sign-extended into a 16-bit register. */
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                /* Byte register sign-extended into a 32-bit register. */
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                /* Byte register sign-extended into a 64-bit register. */
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                /* Byte from memory sign-extended into a 16-bit register. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                /* Byte from memory sign-extended into a 32-bit register. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                /* Byte from memory sign-extended into a 64-bit register. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6429
6430
/** Opcode 0x0f 0xbf - movsx Gv,Ew: sign-extend a word operand into a
 *  32-bit register (non-64-bit effective operand size) or a 64-bit one. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            /* 16- and 32-bit operand sizes both store a sign-extended dword. */
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* REX.W: word register sign-extended into a 64-bit register. */
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6499
6500
/** Opcode 0x0f 0xc0 - xadd Eb,Gb: exchange and add, byte operands.
 *  The destination receives the sum and the source register receives the
 *  original destination value. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();
    IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        /* The worker updates the register operand via a local copy so the
           guest register is only written after the memory commit succeeds. */
        IEM_MC_LOCAL(uint8_t, u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}
6559
6560
/** Opcode 0x0f 0xc1 - xadd Ev,Gv: exchange and add, word/dword/qword operands.
 *  The destination receives the sum and the source register receives the
 *  original destination value. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* Both operands are 32-bit register writes, so both upper halves are cleared. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.  The register operand is worked on via a
         * local copy and only written back after the memory commit.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6713
/** Opcode 0x0f 0xc2. */
/* Not implemented yet; FNIEMOP_STUB expands to a placeholder decoder body. */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);
6716
6717
/** Opcode 0x0f 0xc3 - movnti My,Gy: non-temporal store of a general register
 *  to memory.  Only the register -> memory form is valid; requires SSE2. */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* SSE2 check done after decoding completes. */
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form.   */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
6771
6772
/* MMX/SSE stubs - not implemented yet; FNIEMOP_STUB expands to placeholder decoder bodies. */

/** Opcode 0x0f 0xc4. */
FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);

/** Opcode 0x0f 0xc5. */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);

/** Opcode 0x0f 0xc6. */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
6781
6782
/** Opcode 0x0f 0xc7 !11/1 - cmpxchg8b Mq: compare EDX:EAX with the 64-bit
 *  memory operand; the worker updates the mapped memory and the local
 *  EDX:EAX copy, which is written back to the registers when ZF is clear. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Comparand EDX:EAX packed into a local 64-bit value. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    /* Replacement value ECX:EBX packed likewise. */
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6827
6828
/** Opcode REX.W 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
    /* #UD unless the guest CPU profile advertises CMPXCHG16B (CPUID.01h:ECX.CX16). */
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
    {
#if 1
        /* Currently stubbed: the full implementation below is disabled. */
        RT_NOREF(bRm);
        IEMOP_BITCH_ABOUT_STUB();
        return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#else
        /* 4 args (mem dst, rdx:rax, rcx:rbx, eflags), 3 locals - mirrors cmpxchg8b
           but with 128-bit operands. */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
        IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
        IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
        IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

        /* RDX:RAX comparand. */
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
        IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);

        /* RCX:RBX replacement value. */
        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
        IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);

        IEM_MC_FETCH_EFLAGS(EFlags);
# ifdef RT_ARCH_AMD64
        /* Prefer the native cmpxchg16b worker when the host CPU has it. */
        if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
        {
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        }
        else
# endif
        {
            /* Note! The fallback for 32-bit systems and systems without CX16 is to use
                     SSE instructions for 16-byte loads and stores.  Since these aren't
                     atomic and there are cycles between the loading and storing, this
                     only works correctly in UNI CPU guests.  If guest SMP is active
                     we have no choice but to use a rendezvous callback here.  Sigh. */
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); /* HACK ALERT! */
            if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback_sse, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
                IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        }

        /* Commit; on mismatch (ZF clear) the current memory value lands in RDX:RAX. */
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
            IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
        IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();

        IEM_MC_END();
        return VINF_SUCCESS;
#endif
    }
    Log(("cmpxchg16b -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
6900
6901
/* Group 9 /6 and /7 forms (rdrand, VMX pointer ops): stubbed to raise #UD. */
/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6916
6917
/** Opcode 0x0f 0xc7. */
FNIEMOP_DEF(iemOp_Grp9)
{
    /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
    /* Group 9 dispatches on the ModR/M reg field, then on operand prefixes. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 2: case 3: case 4: case 5:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 1:
            /* /1: cmpxchg8b / cmpxchg16b (REX.W). Register form and 0x66/0xf3
               prefixes are rejected with #UD. */
            /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
            if (   (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
                || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
                return IEMOP_RAISE_INVALID_OPCODE();
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
            return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
        case 6:
            /* /6: register form is rdrand; memory form selects a VMX pointer
               op by prefix (none=vmptrld, 0x66=vmclear, 0xf3=vmxon). */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
            {
                case 0:
                    return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
                case IEM_OP_PRF_SIZE_OP:
                    return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
                case IEM_OP_PRF_REPZ:
                    return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
                default:
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 7:
            /* /7: vmptrst with no prefix or 0xf3; anything else is #UD. */
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
            {
                case 0:
                case IEM_OP_PRF_REPZ:
                    return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
                default:
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6961
6962
6963/**
6964 * Common 'bswap register' helper.
6965 */
6966FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
6967{
6968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6969 switch (pVCpu->iem.s.enmEffOpSize)
6970 {
6971 case IEMMODE_16BIT:
6972 IEM_MC_BEGIN(1, 0);
6973 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6974 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
6975 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
6976 IEM_MC_ADVANCE_RIP();
6977 IEM_MC_END();
6978 return VINF_SUCCESS;
6979
6980 case IEMMODE_32BIT:
6981 IEM_MC_BEGIN(1, 0);
6982 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6983 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
6984 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6985 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
6986 IEM_MC_ADVANCE_RIP();
6987 IEM_MC_END();
6988 return VINF_SUCCESS;
6989
6990 case IEMMODE_64BIT:
6991 IEM_MC_BEGIN(1, 0);
6992 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6993 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
6994 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
6995 IEM_MC_ADVANCE_RIP();
6996 IEM_MC_END();
6997 return VINF_SUCCESS;
6998
6999 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7000 }
7001}
7002
7003
/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();
    /* Delegate to the common helper; REX.B extends rAX to r8. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
7014
7015
/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    /* Delegate to the common helper; REX.B extends rCX to r9. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
7023
7024
7025/** Opcode 0x0f 0xca. */
7026FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7027{
7028 IEMOP_MNEMONIC(bswap_rDX_r9, "bswap rDX/r9");
7029 IEMOP_HLP_MIN_486();
7030 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7031}
7032
7033
7034/** Opcode 0x0f 0xcb. */
7035FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7036{
7037 IEMOP_MNEMONIC(bswap_rBX_r9, "bswap rBX/r9");
7038 IEMOP_HLP_MIN_486();
7039 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7040}
7041
7042
/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    /* Delegate to the common helper; REX.B extends rSP to r12. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
7050
7051
/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    /* Delegate to the common helper; REX.B extends rBP to r13. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
7059
7060
/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    /* Delegate to the common helper; REX.B extends rSI to r14. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
7068
7069
/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    /* Delegate to the common helper; REX.B extends rDI to r15. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
7077
7078
7079
/* Decode stubs for 0x0f 0xd0..0xd6: not implemented in this revision. */
/** Opcode 0x0f 0xd0. */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
/** Opcode 0x0f 0xd1. */
FNIEMOP_STUB(iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq);
/** Opcode 0x0f 0xd2. */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
/** Opcode 0x0f 0xd3. */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
/** Opcode 0x0f 0xd4. */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
/** Opcode 0x0f 0xd5. */
FNIEMOP_STUB(iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq);
/** Opcode 0x0f 0xd6. */
FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq); /** @todo Win10 w/o np may need this: 66 0f d6 0a */
7094
7095
7096/** Opcode 0x0f 0xd7. */
7097FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
7098{
7099 /* Docs says register only. */
7100 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7101 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7102 return IEMOP_RAISE_INVALID_OPCODE();
7103
7104 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
7105 /** @todo testcase: Check that the instruction implicitly clears the high
7106 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
7107 * and opcode modifications are made to work with the whole width (not
7108 * just 128). */
7109 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7110 {
7111 case IEM_OP_PRF_SIZE_OP: /* SSE */
7112 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7113 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7114 IEM_MC_BEGIN(2, 0);
7115 IEM_MC_ARG(uint64_t *, pDst, 0);
7116 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7117 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7118 IEM_MC_PREPARE_SSE_USAGE();
7119 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7120 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7121 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7122 IEM_MC_ADVANCE_RIP();
7123 IEM_MC_END();
7124 return VINF_SUCCESS;
7125
7126 case 0: /* MMX */
7127 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7128 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7129 IEM_MC_BEGIN(2, 0);
7130 IEM_MC_ARG(uint64_t *, pDst, 0);
7131 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7132 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7133 IEM_MC_PREPARE_FPU_USAGE();
7134 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7135 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7136 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7137 IEM_MC_ADVANCE_RIP();
7138 IEM_MC_END();
7139 return VINF_SUCCESS;
7140
7141 default:
7142 return IEMOP_RAISE_INVALID_OPCODE();
7143 }
7144}
7145
7146
/* Decode stubs for 0x0f 0xd8..0xe6: not implemented in this revision. */
/** Opcode 0x0f 0xd8. */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
/** Opcode 0x0f 0xd9. */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
/** Opcode 0x0f 0xda. */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
/** Opcode 0x0f 0xdb. */
FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
/** Opcode 0x0f 0xdc. */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
/** Opcode 0x0f 0xdd. */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
/** Opcode 0x0f 0xde. */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq);
/** Opcode 0x0f 0xdf. */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
/** Opcode 0x0f 0xe0. */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
/** Opcode 0x0f 0xe1. */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
/** Opcode 0x0f 0xe2. */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
/** Opcode 0x0f 0xe3. */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
/** Opcode 0x0f 0xe4. */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
/** Opcode 0x0f 0xe5. */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
/** Opcode 0x0f 0xe6. */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
7177
7178
7179/** Opcode 0x0f 0xe7. */
7180FNIEMOP_DEF(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq)
7181{
7182 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7183 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7184 {
7185 /*
7186 * Register, memory.
7187 */
7188/** @todo check when the REPNZ/Z bits kick in. Same as lock, probably... */
7189 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7190 {
7191
7192 case IEM_OP_PRF_SIZE_OP: /* SSE */
7193 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7194 IEM_MC_BEGIN(0, 2);
7195 IEM_MC_LOCAL(uint128_t, uSrc);
7196 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7197
7198 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7199 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7200 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7201 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7202
7203 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7204 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7205
7206 IEM_MC_ADVANCE_RIP();
7207 IEM_MC_END();
7208 break;
7209
7210 case 0: /* MMX */
7211 IEMOP_MNEMONIC(movntdq_Mdq_Vdq, "movntdq Mdq,Vdq");
7212 IEM_MC_BEGIN(0, 2);
7213 IEM_MC_LOCAL(uint64_t, uSrc);
7214 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7215
7216 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7217 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7218 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7219 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7220
7221 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7222 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7223
7224 IEM_MC_ADVANCE_RIP();
7225 IEM_MC_END();
7226 break;
7227
7228 default:
7229 return IEMOP_RAISE_INVALID_OPCODE();
7230 }
7231 }
7232 /* The register, register encoding is invalid. */
7233 else
7234 return IEMOP_RAISE_INVALID_OPCODE();
7235 return VINF_SUCCESS;
7236}
7237
7238
/* Decode stubs for 0x0f 0xe8..0xee: not implemented in this revision. */
/** Opcode 0x0f 0xe8. */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
/** Opcode 0x0f 0xe9. */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
/** Opcode 0x0f 0xea. */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
/** Opcode 0x0f 0xeb. */
FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
/** Opcode 0x0f 0xec. */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
/** Opcode 0x0f 0xed. */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
/** Opcode 0x0f 0xee. */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
7253
7254
/** Opcode 0x0f 0xef. */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
{
    IEMOP_MNEMONIC(pxor, "pxor");
    /* Delegate to the common MMX/SSE2 full-width binary-op decoder with the
       pxor implementation table. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
7261
7262
/* Decode stubs for 0x0f 0xf0..0xfe: not implemented in this revision. */
/** Opcode 0x0f 0xf0. */
FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
/** Opcode 0x0f 0xf1. */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq);
/** Opcode 0x0f 0xf2. */
FNIEMOP_STUB(iemOp_psld_Pq_Qq__pslld_Vdq_Wdq);
/** Opcode 0x0f 0xf3. */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq);
/** Opcode 0x0f 0xf4. */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
/** Opcode 0x0f 0xf5. */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
/** Opcode 0x0f 0xf6. */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
/** Opcode 0x0f 0xf7. */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
/** Opcode 0x0f 0xf8. */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq); //NEXT
/** Opcode 0x0f 0xf9. */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
/** Opcode 0x0f 0xfa. */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
/** Opcode 0x0f 0xfb. */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq);
/** Opcode 0x0f 0xfc. */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
/** Opcode 0x0f 0xfd. */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
/** Opcode 0x0f 0xfe. */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
7293
7294
/**
 * Two-byte opcode (0x0f xx) dispatch table, indexed by the second opcode byte.
 * Each entry decodes all prefix variants of that opcode itself.
 */
IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[256] =
{
    /* 0x00 */  iemOp_Grp6,
    /* 0x01 */  iemOp_Grp7,
    /* 0x02 */  iemOp_lar_Gv_Ew,
    /* 0x03 */  iemOp_lsl_Gv_Ew,
    /* 0x04 */  iemOp_Invalid,
    /* 0x05 */  iemOp_syscall,
    /* 0x06 */  iemOp_clts,
    /* 0x07 */  iemOp_sysret,
    /* 0x08 */  iemOp_invd,
    /* 0x09 */  iemOp_wbinvd,
    /* 0x0a */  iemOp_Invalid,
    /* 0x0b */  iemOp_ud2,
    /* 0x0c */  iemOp_Invalid,
    /* 0x0d */  iemOp_nop_Ev_GrpP,
    /* 0x0e */  iemOp_femms,
    /* 0x0f */  iemOp_3Dnow,
    /* 0x10 */  iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
    /* 0x11 */  iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
    /* 0x12 */  iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
    /* 0x13 */  iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
    /* 0x14 */  iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
    /* 0x15 */  iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
    /* 0x16 */  iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
    /* 0x17 */  iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
    /* 0x18 */  iemOp_prefetch_Grp16,
    /* 0x19 */  iemOp_nop_Ev,
    /* 0x1a */  iemOp_nop_Ev,
    /* 0x1b */  iemOp_nop_Ev,
    /* 0x1c */  iemOp_nop_Ev,
    /* 0x1d */  iemOp_nop_Ev,
    /* 0x1e */  iemOp_nop_Ev,
    /* 0x1f */  iemOp_nop_Ev,
    /* 0x20 */  iemOp_mov_Rd_Cd,
    /* 0x21 */  iemOp_mov_Rd_Dd,
    /* 0x22 */  iemOp_mov_Cd_Rd,
    /* 0x23 */  iemOp_mov_Dd_Rd,
    /* 0x24 */  iemOp_mov_Rd_Td,
    /* 0x25 */  iemOp_Invalid,
    /* 0x26 */  iemOp_mov_Td_Rd,
    /* 0x27 */  iemOp_Invalid,
    /* 0x28 */  iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
    /* 0x29 */  iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
    /* 0x2a */  iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
    /* 0x2b */  iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
    /* 0x2c */  iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
    /* 0x2d */  iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
    /* 0x2e */  iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
    /* 0x2f */  iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
    /* 0x30 */  iemOp_wrmsr,
    /* 0x31 */  iemOp_rdtsc,
    /* 0x32 */  iemOp_rdmsr,
    /* 0x33 */  iemOp_rdpmc,
    /* 0x34 */  iemOp_sysenter,
    /* 0x35 */  iemOp_sysexit,
    /* 0x36 */  iemOp_Invalid,
    /* 0x37 */  iemOp_getsec,
    /* 0x38 */  iemOp_3byte_Esc_A4,
    /* 0x39 */  iemOp_Invalid,
    /* 0x3a */  iemOp_3byte_Esc_A5,
    /* 0x3b */  iemOp_Invalid,
    /* 0x3c */  iemOp_Invalid,
    /* 0x3d */  iemOp_Invalid,
    /* 0x3e */  iemOp_Invalid,
    /* 0x3f */  iemOp_Invalid,
    /* 0x40 */  iemOp_cmovo_Gv_Ev,
    /* 0x41 */  iemOp_cmovno_Gv_Ev,
    /* 0x42 */  iemOp_cmovc_Gv_Ev,
    /* 0x43 */  iemOp_cmovnc_Gv_Ev,
    /* 0x44 */  iemOp_cmove_Gv_Ev,
    /* 0x45 */  iemOp_cmovne_Gv_Ev,
    /* 0x46 */  iemOp_cmovbe_Gv_Ev,
    /* 0x47 */  iemOp_cmovnbe_Gv_Ev,
    /* 0x48 */  iemOp_cmovs_Gv_Ev,
    /* 0x49 */  iemOp_cmovns_Gv_Ev,
    /* 0x4a */  iemOp_cmovp_Gv_Ev,
    /* 0x4b */  iemOp_cmovnp_Gv_Ev,
    /* 0x4c */  iemOp_cmovl_Gv_Ev,
    /* 0x4d */  iemOp_cmovnl_Gv_Ev,
    /* 0x4e */  iemOp_cmovle_Gv_Ev,
    /* 0x4f */  iemOp_cmovnle_Gv_Ev,
    /* 0x50 */  iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
    /* 0x51 */  iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
    /* 0x52 */  iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
    /* 0x53 */  iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
    /* 0x54 */  iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
    /* 0x55 */  iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
    /* 0x56 */  iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
    /* 0x57 */  iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
    /* 0x58 */  iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
    /* 0x59 */  iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
    /* 0x5a */  iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
    /* 0x5b */  iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
    /* 0x5c */  iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
    /* 0x5d */  iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
    /* 0x5e */  iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
    /* 0x5f */  iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
    /* 0x60 */  iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
    /* 0x61 */  iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
    /* 0x62 */  iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
    /* 0x63 */  iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
    /* 0x64 */  iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
    /* 0x65 */  iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
    /* 0x66 */  iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
    /* 0x67 */  iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
    /* 0x68 */  iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
    /* 0x69 */  iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
    /* 0x6a */  iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
    /* 0x6b */  iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
    /* 0x6c */  iemOp_punpcklqdq_Vdq_Wdq,
    /* 0x6d */  iemOp_punpckhqdq_Vdq_Wdq,
    /* 0x6e */  iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
    /* 0x6f */  iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
    /* 0x70 */  iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
    /* 0x71 */  iemOp_Grp12,
    /* 0x72 */  iemOp_Grp13,
    /* 0x73 */  iemOp_Grp14,
    /* 0x74 */  iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
    /* 0x75 */  iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
    /* 0x76 */  iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
    /* 0x77 */  iemOp_emms,
    /* 0x78 */  iemOp_vmread_AmdGrp17,
    /* 0x79 */  iemOp_vmwrite,
    /* 0x7a */  iemOp_Invalid,
    /* 0x7b */  iemOp_Invalid,
    /* 0x7c */  iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
    /* 0x7d */  iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
    /* 0x7e */  iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
    /* 0x7f */  iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
    /* 0x80 */  iemOp_jo_Jv,
    /* 0x81 */  iemOp_jno_Jv,
    /* 0x82 */  iemOp_jc_Jv,
    /* 0x83 */  iemOp_jnc_Jv,
    /* 0x84 */  iemOp_je_Jv,
    /* 0x85 */  iemOp_jne_Jv,
    /* 0x86 */  iemOp_jbe_Jv,
    /* 0x87 */  iemOp_jnbe_Jv,
    /* 0x88 */  iemOp_js_Jv,
    /* 0x89 */  iemOp_jns_Jv,
    /* 0x8a */  iemOp_jp_Jv,
    /* 0x8b */  iemOp_jnp_Jv,
    /* 0x8c */  iemOp_jl_Jv,
    /* 0x8d */  iemOp_jnl_Jv,
    /* 0x8e */  iemOp_jle_Jv,
    /* 0x8f */  iemOp_jnle_Jv,
    /* 0x90 */  iemOp_seto_Eb,
    /* 0x91 */  iemOp_setno_Eb,
    /* 0x92 */  iemOp_setc_Eb,
    /* 0x93 */  iemOp_setnc_Eb,
    /* 0x94 */  iemOp_sete_Eb,
    /* 0x95 */  iemOp_setne_Eb,
    /* 0x96 */  iemOp_setbe_Eb,
    /* 0x97 */  iemOp_setnbe_Eb,
    /* 0x98 */  iemOp_sets_Eb,
    /* 0x99 */  iemOp_setns_Eb,
    /* 0x9a */  iemOp_setp_Eb,
    /* 0x9b */  iemOp_setnp_Eb,
    /* 0x9c */  iemOp_setl_Eb,
    /* 0x9d */  iemOp_setnl_Eb,
    /* 0x9e */  iemOp_setle_Eb,
    /* 0x9f */  iemOp_setnle_Eb,
    /* 0xa0 */  iemOp_push_fs,
    /* 0xa1 */  iemOp_pop_fs,
    /* 0xa2 */  iemOp_cpuid,
    /* 0xa3 */  iemOp_bt_Ev_Gv,
    /* 0xa4 */  iemOp_shld_Ev_Gv_Ib,
    /* 0xa5 */  iemOp_shld_Ev_Gv_CL,
    /* 0xa6 */  iemOp_Invalid,
    /* 0xa7 */  iemOp_Invalid,
    /* 0xa8 */  iemOp_push_gs,
    /* 0xa9 */  iemOp_pop_gs,
    /* 0xaa */  iemOp_rsm,
    /* 0xab */  iemOp_bts_Ev_Gv,
    /* 0xac */  iemOp_shrd_Ev_Gv_Ib,
    /* 0xad */  iemOp_shrd_Ev_Gv_CL,
    /* 0xae */  iemOp_Grp15,
    /* 0xaf */  iemOp_imul_Gv_Ev,
    /* 0xb0 */  iemOp_cmpxchg_Eb_Gb,
    /* 0xb1 */  iemOp_cmpxchg_Ev_Gv,
    /* 0xb2 */  iemOp_lss_Gv_Mp,
    /* 0xb3 */  iemOp_btr_Ev_Gv,
    /* 0xb4 */  iemOp_lfs_Gv_Mp,
    /* 0xb5 */  iemOp_lgs_Gv_Mp,
    /* 0xb6 */  iemOp_movzx_Gv_Eb,
    /* 0xb7 */  iemOp_movzx_Gv_Ew,
    /* 0xb8 */  iemOp_popcnt_Gv_Ev_jmpe,
    /* 0xb9 */  iemOp_Grp10,
    /* 0xba */  iemOp_Grp8,
    /* 0xbb */  iemOp_btc_Ev_Gv,
    /* 0xbc */  iemOp_bsf_Gv_Ev,
    /* 0xbd */  iemOp_bsr_Gv_Ev,
    /* 0xbe */  iemOp_movsx_Gv_Eb,
    /* 0xbf */  iemOp_movsx_Gv_Ew,
    /* 0xc0 */  iemOp_xadd_Eb_Gb,
    /* 0xc1 */  iemOp_xadd_Ev_Gv,
    /* 0xc2 */  iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */  iemOp_movnti_My_Gy,
    /* 0xc4 */  iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
    /* 0xc5 */  iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
    /* 0xc6 */  iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
    /* 0xc7 */  iemOp_Grp9,
    /* 0xc8 */  iemOp_bswap_rAX_r8,
    /* 0xc9 */  iemOp_bswap_rCX_r9,
    /* 0xca */  iemOp_bswap_rDX_r10,
    /* 0xcb */  iemOp_bswap_rBX_r11,
    /* 0xcc */  iemOp_bswap_rSP_r12,
    /* 0xcd */  iemOp_bswap_rBP_r13,
    /* 0xce */  iemOp_bswap_rSI_r14,
    /* 0xcf */  iemOp_bswap_rDI_r15,
    /* 0xd0 */  iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
    /* 0xd1 */  iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq,
    /* 0xd2 */  iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
    /* 0xd3 */  iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
    /* 0xd4 */  iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
    /* 0xd5 */  iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq,
    /* 0xd6 */  iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
    /* 0xd7 */  iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
    /* 0xd8 */  iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
    /* 0xd9 */  iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
    /* 0xda */  iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
    /* 0xdb */  iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
    /* 0xdc */  iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
    /* 0xdd */  iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
    /* 0xde */  iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq,
    /* 0xdf */  iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
    /* 0xe0 */  iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
    /* 0xe1 */  iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
    /* 0xe2 */  iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
    /* 0xe3 */  iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
    /* 0xe4 */  iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
    /* 0xe5 */  iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
    /* 0xe6 */  iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
    /* 0xe7 */  iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
    /* 0xe8 */  iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
    /* 0xe9 */  iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
    /* 0xea */  iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
    /* 0xeb */  iemOp_por_Pq_Qq__por_Vdq_Wdq,
    /* 0xec */  iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
    /* 0xed */  iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
    /* 0xee */  iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
    /* 0xef */  iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
    /* 0xf0 */  iemOp_lddqu_Vdq_Mdq,
    /* 0xf1 */  iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq,
    /* 0xf2 */  iemOp_psld_Pq_Qq__pslld_Vdq_Wdq,
    /* 0xf3 */  iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq,
    /* 0xf4 */  iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
    /* 0xf5 */  iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
    /* 0xf6 */  iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
    /* 0xf7 */  iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
    /* 0xf8 */  iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
    /* 0xf9 */  iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
    /* 0xfa */  iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
    /* 0xfb */  iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq,
    /* 0xfc */  iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
    /* 0xfd */  iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
    /* 0xfe */  iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
    /* 0xff */  iemOp_Invalid
};
7554
7555/** @} */
7556
7557
7558/** @name One byte opcodes.
7559 *
7560 * @{
7561 */
7562
/** Opcode 0x00. */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC(add_Eb_Gb, "add Eb,Gb");
    /* Common Eb,Gb binary-op worker with the ADD implementation table. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}
7569
7570
/** Opcode 0x01. */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC(add_Ev_Gv, "add Ev,Gv");
    /* Common Ev,Gv binary-op worker with the ADD implementation table. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}
7577
7578
/** Opcode 0x02. */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC(add_Gb_Eb, "add Gb,Eb");
    /* Common Gb,Eb binary-op worker with the ADD implementation table. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}
7585
7586
/** Opcode 0x03. */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC(add_Gv_Ev, "add Gv,Ev");
    /* Common Gv,Ev binary-op worker with the ADD implementation table. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}
7593
7594
/** Opcode 0x04. */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC(add_al_Ib, "add al,Ib");
    /* Common AL,imm8 binary-op worker with the ADD implementation table. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}
7601
7602
/** Opcode 0x05. */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC(add_rAX_Iz, "add rAX,Iz");
    /* Common rAX,Iz binary-op worker with the ADD implementation table. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
7609
7610
/** Opcode 0x06. */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC(push_es, "push es");
    /* Common segment-register push worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
7617
7618
/** Opcode 0x07. */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC(pop_es, "pop es");
    /* Invalid in 64-bit mode. */
    /* NOTE(review): pop_SS does these two checks in the opposite order -
       confirm whether the ordering matters and unify. */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
7627
7628
/** Opcode 0x08. */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC(or_Eb_Gb, "or Eb,Gb");
    /* AF is architecturally undefined after OR. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
7636
7637
/** Opcode 0x09. */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC(or_Ev_Gv, "or Ev,Gv");
    /* AF is architecturally undefined after OR. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}
7645
7646
/** Opcode 0x0a. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC(or_Gb_Eb, "or Gb,Eb");
    /* AF is architecturally undefined after OR. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}
7654
7655
/** Opcode 0x0b. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC(or_Gv_Ev, "or Gv,Ev");
    /* AF is architecturally undefined after OR. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}
7663
7664
/** Opcode 0x0c. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC(or_al_Ib, "or al,Ib");
    /* AF is architecturally undefined after OR. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}
7672
7673
/** Opcode 0x0d. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC(or_rAX_Iz, "or rAX,Iz");
    /* AF is architecturally undefined after OR. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
7681
7682
/** Opcode 0x0e. */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC(push_cs, "push cs");
    /* Common segment-register push worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
7689
7690
/** Opcode 0x0f. */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    /* Fetch the second opcode byte and dispatch through the two-byte map. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    /** @todo PUSH CS on 8086, undefined on 80186. */
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
}
7699
/** Opcode 0x10. */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC(adc_Eb_Gb, "adc Eb,Gb");
    /* Common Eb,Gb binary-op worker with the ADC implementation table. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}
7706
7707
7708/** Opcode 0x11. */
7709FNIEMOP_DEF(iemOp_adc_Ev_Gv)
7710{
7711 IEMOP_MNEMONIC(adc_Ev_Gv, "adc Ev,Gv");
7712 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
7713}
7714
7715
7716/** Opcode 0x12. */
7717FNIEMOP_DEF(iemOp_adc_Gb_Eb)
7718{
7719 IEMOP_MNEMONIC(adc_Gb_Eb, "adc Gb,Eb");
7720 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
7721}
7722
7723
7724/** Opcode 0x13. */
7725FNIEMOP_DEF(iemOp_adc_Gv_Ev)
7726{
7727 IEMOP_MNEMONIC(adc_Gv_Ev, "adc Gv,Ev");
7728 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
7729}
7730
7731
7732/** Opcode 0x14. */
7733FNIEMOP_DEF(iemOp_adc_Al_Ib)
7734{
7735 IEMOP_MNEMONIC(adc_al_Ib, "adc al,Ib");
7736 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
7737}
7738
7739
7740/** Opcode 0x15. */
7741FNIEMOP_DEF(iemOp_adc_eAX_Iz)
7742{
7743 IEMOP_MNEMONIC(adc_rAX_Iz, "adc rAX,Iz");
7744 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
7745}
7746
7747
7748/** Opcode 0x16. */
7749FNIEMOP_DEF(iemOp_push_SS)
7750{
7751 IEMOP_MNEMONIC(push_ss, "push ss");
7752 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
7753}
7754
7755
7756/** Opcode 0x17. */
7757FNIEMOP_DEF(iemOp_pop_SS)
7758{
7759 IEMOP_MNEMONIC(pop_ss, "pop ss"); /** @todo implies instruction fusing? */
7760 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7761 IEMOP_HLP_NO_64BIT();
7762 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
7763}
7764
7765
/** Opcode 0x18. SBB Eb,Gb - byte subtract-with-borrow, reg/mem destination. */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC(sbb_Eb_Gb, "sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/** Opcode 0x19. SBB Ev,Gv - word/dword/qword subtract-with-borrow, reg/mem destination. */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC(sbb_Ev_Gv, "sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/** Opcode 0x1a. SBB Gb,Eb - byte subtract-with-borrow, register destination. */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC(sbb_Gb_Eb, "sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1b. SBB Gv,Ev - word/dword/qword subtract-with-borrow, register destination. */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC(sbb_Gv_Ev, "sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1c. SBB AL,Ib - byte subtract-with-borrow, immediate from AL. */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC(sbb_al_Ib, "sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/** Opcode 0x1d. SBB rAX,Iz - subtract-with-borrow, immediate from AX/EAX/RAX. */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC(sbb_rAX_Iz, "sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}


/** Opcode 0x1e. PUSH DS via the common segment-register push worker. */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC(push_ds, "push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}


/** Opcode 0x1f. POP DS - not valid in 64-bit mode; deferred to the C
 *  implementation of POP Sreg. */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC(pop_ds, "pop ds");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
7830
7831
/** Opcode 0x20. AND Eb,Gb - byte AND, reg/mem destination. */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC(and_Eb_Gb, "and Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by AND. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}


/** Opcode 0x21. AND Ev,Gv - word/dword/qword AND, reg/mem destination. */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC(and_Ev_Gv, "and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}


/** Opcode 0x22. AND Gb,Eb - byte AND, register destination. */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC(and_Gb_Eb, "and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}


/** Opcode 0x23. AND Gv,Ev - word/dword/qword AND, register destination. */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC(and_Gv_Ev, "and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}


/** Opcode 0x24. AND AL,Ib - byte AND, immediate into AL. */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC(and_al_Ib, "and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}


/** Opcode 0x25. AND rAX,Iz - AND, immediate into AX/EAX/RAX. */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC(and_rAX_Iz, "and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}


/** Opcode 0x26. ES segment-override prefix: record the prefix and effective
 *  segment, then continue decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7896
7897
/** Opcode 0x27. DAA - decimal adjust AL after addition. Invalid in 64-bit
 *  mode; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC(daa_AL, "daa AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is left undefined by DAA. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
7907
7908
/** Opcode 0x28. SUB Eb,Gb - byte subtract, reg/mem destination. */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC(sub_Eb_Gb, "sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/** Opcode 0x29. SUB Ev,Gv - word/dword/qword subtract, reg/mem destination. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC(sub_Ev_Gv, "sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/** Opcode 0x2a. SUB Gb,Eb - byte subtract, register destination. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC(sub_Gb_Eb, "sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2b. SUB Gv,Ev - word/dword/qword subtract, register destination. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC(sub_Gv_Ev, "sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2c. SUB AL,Ib - byte subtract, immediate from AL. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC(sub_al_Ib, "sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/** Opcode 0x2d. SUB rAX,Iz - subtract, immediate from AX/EAX/RAX. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC(sub_rAX_Iz, "sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}


/** Opcode 0x2e. CS segment-override prefix: record the prefix and effective
 *  segment, then continue decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x2f. DAS - decimal adjust AL after subtraction. Invalid in 64-bit
 *  mode; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC(das_AL, "das AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is left undefined by DAS. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
7978
7979
/** Opcode 0x30. XOR Eb,Gb - byte XOR, reg/mem destination. */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC(xor_Eb_Gb, "xor Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by XOR. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/** Opcode 0x31. XOR Ev,Gv - word/dword/qword XOR, reg/mem destination. */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC(xor_Ev_Gv, "xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/** Opcode 0x32. XOR Gb,Eb - byte XOR, register destination. */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC(xor_Gb_Eb, "xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/** Opcode 0x33. XOR Gv,Ev - word/dword/qword XOR, register destination. */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC(xor_Gv_Ev, "xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}


/** Opcode 0x34. XOR AL,Ib - byte XOR, immediate into AL. */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC(xor_al_Ib, "xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}


/** Opcode 0x35. XOR rAX,Iz - XOR, immediate into AX/EAX/RAX. */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC(xor_rAX_Iz, "xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}


/** Opcode 0x36. SS segment-override prefix: record the prefix and effective
 *  segment, then continue decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x37. AAA - not implemented yet (stub raises not-implemented). */
FNIEMOP_STUB(iemOp_aaa);
8048
8049
/** Opcode 0x38. CMP Eb,Gb - byte compare (subtract, flags only), reg/mem first operand. */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}


/** Opcode 0x39. CMP Ev,Gv - word/dword/qword compare, reg/mem first operand. */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}


/** Opcode 0x3a. CMP Gb,Eb - byte compare, register first operand. */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3b. CMP Gv,Ev - word/dword/qword compare, register first operand. */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3c. CMP AL,Ib - byte compare of AL with immediate. */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}


/** Opcode 0x3d. CMP rAX,Iz - compare of AX/EAX/RAX with immediate. */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}


/** Opcode 0x3e. DS segment-override prefix: record the prefix and effective
 *  segment, then continue decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x3f. AAS - not implemented yet (stub raises not-implemented). */
FNIEMOP_STUB(iemOp_aas);
8112
8113/**
8114 * Common 'inc/dec/not/neg register' helper.
8115 */
8116FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
8117{
8118 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8119 switch (pVCpu->iem.s.enmEffOpSize)
8120 {
8121 case IEMMODE_16BIT:
8122 IEM_MC_BEGIN(2, 0);
8123 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8124 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8125 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
8126 IEM_MC_REF_EFLAGS(pEFlags);
8127 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
8128 IEM_MC_ADVANCE_RIP();
8129 IEM_MC_END();
8130 return VINF_SUCCESS;
8131
8132 case IEMMODE_32BIT:
8133 IEM_MC_BEGIN(2, 0);
8134 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8135 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8136 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8137 IEM_MC_REF_EFLAGS(pEFlags);
8138 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
8139 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8140 IEM_MC_ADVANCE_RIP();
8141 IEM_MC_END();
8142 return VINF_SUCCESS;
8143
8144 case IEMMODE_64BIT:
8145 IEM_MC_BEGIN(2, 0);
8146 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8147 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8148 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8149 IEM_MC_REF_EFLAGS(pEFlags);
8150 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
8151 IEM_MC_ADVANCE_RIP();
8152 IEM_MC_END();
8153 return VINF_SUCCESS;
8154 }
8155 return VINF_SUCCESS;
8156}
8157
8158
/** Opcode 0x40. REX prefix in 64-bit mode; otherwise INC eAX. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}


/** Opcode 0x41. REX.B prefix in 64-bit mode; otherwise INC eCX. */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB     = 1 << 3; /* high bit of the r/m register index */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}


/** Opcode 0x42. REX.X prefix in 64-bit mode; otherwise INC eDX. */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* high bit of the SIB index register */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}



/** Opcode 0x43. REX.BX prefix in 64-bit mode; otherwise INC eBX. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}


/** Opcode 0x44. REX.R prefix in 64-bit mode; otherwise INC eSP. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* high bit of the ModR/M reg field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}


/** Opcode 0x45. REX.RB prefix in 64-bit mode; otherwise INC eBP. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}


/** Opcode 0x46. REX.RX prefix in 64-bit mode; otherwise INC eSI. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}


/** Opcode 0x47. REX.RBX prefix in 64-bit mode; otherwise INC eDI. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
8330
8331
/** Opcode 0x48. REX.W prefix in 64-bit mode; otherwise DEC eAX. */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}


/** Opcode 0x49. REX.BW prefix in 64-bit mode; otherwise DEC eCX. */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}


/** Opcode 0x4a. REX.XW prefix in 64-bit mode; otherwise DEC eDX. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}


/** Opcode 0x4b. REX.BXW prefix in 64-bit mode; otherwise DEC eBX. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}


/** Opcode 0x4c. REX.RW prefix in 64-bit mode; otherwise DEC eSP. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}


/** Opcode 0x4d. REX.RBW prefix in 64-bit mode; otherwise DEC eBP. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}


/** Opcode 0x4e. REX.RXW prefix in 64-bit mode; otherwise DEC eSI. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}


/** Opcode 0x4f. REX.RBXW prefix in 64-bit mode; otherwise DEC eDI. */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
8510
8511
8512/**
8513 * Common 'push register' helper.
8514 */
8515FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
8516{
8517 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8518 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8519 {
8520 iReg |= pVCpu->iem.s.uRexB;
8521 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
8522 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8523 }
8524
8525 switch (pVCpu->iem.s.enmEffOpSize)
8526 {
8527 case IEMMODE_16BIT:
8528 IEM_MC_BEGIN(0, 1);
8529 IEM_MC_LOCAL(uint16_t, u16Value);
8530 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
8531 IEM_MC_PUSH_U16(u16Value);
8532 IEM_MC_ADVANCE_RIP();
8533 IEM_MC_END();
8534 break;
8535
8536 case IEMMODE_32BIT:
8537 IEM_MC_BEGIN(0, 1);
8538 IEM_MC_LOCAL(uint32_t, u32Value);
8539 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
8540 IEM_MC_PUSH_U32(u32Value);
8541 IEM_MC_ADVANCE_RIP();
8542 IEM_MC_END();
8543 break;
8544
8545 case IEMMODE_64BIT:
8546 IEM_MC_BEGIN(0, 1);
8547 IEM_MC_LOCAL(uint64_t, u64Value);
8548 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
8549 IEM_MC_PUSH_U64(u64Value);
8550 IEM_MC_ADVANCE_RIP();
8551 IEM_MC_END();
8552 break;
8553 }
8554
8555 return VINF_SUCCESS;
8556}
8557
8558
/** Opcode 0x50. PUSH rAX. */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}


/** Opcode 0x51. PUSH rCX. */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}


/** Opcode 0x52. PUSH rDX. */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}


/** Opcode 0x53. PUSH rBX. */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}


/** Opcode 0x54. PUSH rSP.  The 8086 pushes the already decremented SP value
 *  (SP-2); later CPUs push the pre-decrement value via the common worker. */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        /* NOTE(review): the MC block is assumed to complete the instruction
           and exit the function here (cf. the C4702 pragma up top); only
           non-8086 targets reach the common path below - confirm. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}


/** Opcode 0x55. PUSH rBP. */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}


/** Opcode 0x56. PUSH rSI. */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}


/** Opcode 0x57. PUSH rDI. */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
8631
8632
8633/**
8634 * Common 'pop register' helper.
8635 */
8636FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
8637{
8638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8639 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8640 {
8641 iReg |= pVCpu->iem.s.uRexB;
8642 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
8643 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8644 }
8645
8646 switch (pVCpu->iem.s.enmEffOpSize)
8647 {
8648 case IEMMODE_16BIT:
8649 IEM_MC_BEGIN(0, 1);
8650 IEM_MC_LOCAL(uint16_t *, pu16Dst);
8651 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
8652 IEM_MC_POP_U16(pu16Dst);
8653 IEM_MC_ADVANCE_RIP();
8654 IEM_MC_END();
8655 break;
8656
8657 case IEMMODE_32BIT:
8658 IEM_MC_BEGIN(0, 1);
8659 IEM_MC_LOCAL(uint32_t *, pu32Dst);
8660 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8661 IEM_MC_POP_U32(pu32Dst);
8662 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
8663 IEM_MC_ADVANCE_RIP();
8664 IEM_MC_END();
8665 break;
8666
8667 case IEMMODE_64BIT:
8668 IEM_MC_BEGIN(0, 1);
8669 IEM_MC_LOCAL(uint64_t *, pu64Dst);
8670 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8671 IEM_MC_POP_U64(pu64Dst);
8672 IEM_MC_ADVANCE_RIP();
8673 IEM_MC_END();
8674 break;
8675 }
8676
8677 return VINF_SUCCESS;
8678}
8679
8680
/** Opcode 0x58. POP rAX. */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}


/** Opcode 0x59. POP rCX. */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}


/** Opcode 0x5a. POP rDX. */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}


/** Opcode 0x5b. POP rBX. */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}


/** Opcode 0x5c. POP rSP.  Special-cased: the popped value must be read first
 *  and then stored into rSP (the common worker would pop through a register
 *  reference, which is wrong for the stack pointer itself).  With REX.B this
 *  is POP r12 and the common worker applies. */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}


/** Opcode 0x5d. POP rBP. */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}


/** Opcode 0x5e. POP rSI. */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}


/** Opcode 0x5f. POP rDI. */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
8784
8785
/** Opcode 0x60. PUSHA/PUSHAD - push all general registers.  186+ only,
 *  invalid in 64-bit mode; deferred to the C implementation per operand size. */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT); /* only 16/32-bit possible outside 64-bit mode */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}


/** Opcode 0x61. POPA/POPAD - pop all general registers.  186+ only,
 *  invalid in 64-bit mode; deferred to the C implementation per operand size. */
FNIEMOP_DEF(iemOp_popa)
{
    IEMOP_MNEMONIC(popa, "popa");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
}


/** Opcode 0x62. BOUND / EVEX prefix - not implemented yet (stub). */
FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
// IEMOP_HLP_MIN_186();
8815
8816
/** Opcode 0x63 - non-64-bit modes.  ARPL Ew,Gw - adjust RPL field of the
 *  destination selector.  286+, protected mode only (no real/V86 mode). */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: reference it directly and update EFLAGS in place. */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
        IEM_MC_ARG(uint16_t,        u16Src,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map it read-write, apply, then commit data and flags. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst,          0);
        IEM_MC_ARG(uint16_t,   u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
8866
8867
/** Opcode 0x63.  MOVSXD Gv,Ev - sign-extend a 32-bit source into a 64-bit
 *  register (64-bit mode form of opcode 0x63).
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register: sign-extend the 32-bit source register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory: sign-extend the 32-bit operand.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8909
8910
/** Opcode 0x64.
 * FS segment-override prefix: records the prefix, makes FS the effective data
 * segment and restarts decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs"); /* a REX prefix only counts when immediately preceding the opcode */
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8923
8924
/** Opcode 0x65.
 * GS segment-override prefix: records the prefix, makes GS the effective data
 * segment and restarts decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs"); /* a REX prefix only counts when immediately preceding the opcode */
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8937
8938
/** Opcode 0x66.
 * Operand-size override prefix: records the prefix, recalculates the
 * effective operand size and restarts decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size"); /* a REX prefix only counts when immediately preceding the opcode */
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8951
8952
/** Opcode 0x67.
 * Address-size override prefix: toggles the effective address mode relative
 * to the default (16<->32 in legacy modes, 64->32 in long mode) and restarts
 * decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size"); /* a REX prefix only counts when immediately preceding the opcode */
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break; /* no 16-bit addressing in long mode */
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8971
8972
/** Opcode 0x68.
 * PUSH Iz: pushes a word/dword/qword immediate. In 64-bit mode the operand
 * size defaults to 64 bits and the immediate is a sign-extended dword. */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Only a 32-bit immediate is encoded; sign-extend it to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9017
9018
/** Opcode 0x69.
 * Three-operand IMUL: Gv = Ev * Iz. In 64-bit mode the immediate is a
 * sign-extended dword. SF, ZF, AF and PF are left undefined by the hardware,
 * hence the IEMOP_VERIFICATION_UNDEFINED_EFLAGS below. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* The worker multiplies in place, so work on a local copy and
                   store the result into the destination register afterwards. */
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* last arg: bytes of immediate still to be fetched after the operand */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; the immediate is a sign-extended dword */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
9178
9179
/** Opcode 0x6a.
 * PUSH Ib: pushes a byte immediate sign-extended to the effective operand
 * size (64-bit operand size is the default in long mode). */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: /* i8Imm is sign-extended by the implicit int8_t conversion */
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9206
9207
/** Opcode 0x6b.
 * Three-operand IMUL with a byte immediate: Gv = Ev * (sign-extended Ib).
 * SF, ZF, AF and PF are left undefined by the hardware. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* The worker multiplies in place, so work on a local copy and
                   store the result into the destination register afterwards. */
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* last arg: one immediate byte still to be fetched after the operand */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
9361
9362
/** Opcode 0x6c.
 * INS/REP INS Yb,DX - byte input string. Both F2 and F3 prefixes are treated
 * as REP here. Deferred to C implementations selected by address size. */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9391
9392
/** Opcode 0x6d.
 * INS/REP INS Yv,DX - word/dword input string. Both F2 and F3 prefixes are
 * treated as REP. A 64-bit operand size behaves like 32-bit (no 64-bit port
 * I/O), hence the shared case labels. Deferred to C implementations selected
 * by operand and address size. */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all inner cases return */
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all inner cases return */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all inner cases return */
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all inner cases return */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9453
9454
/** Opcode 0x6e.
 * OUTS/REP OUTS DX,Yb - byte output string. Both F2 and F3 prefixes are
 * treated as REP. The effective segment is passed along since OUTS honours
 * segment overrides for the source. Deferred to C implementations. */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9483
9484
/** Opcode 0x6f.
 * OUTS/REP OUTS DX,Yv - word/dword output string. Both F2 and F3 prefixes are
 * treated as REP; 64-bit operand size behaves like 32-bit (no 64-bit port
 * I/O). The effective segment is passed along since OUTS honours segment
 * overrides for the source. Deferred to C implementations. */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all inner cases return */
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all inner cases return */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all inner cases return */
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all inner cases return */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9545
9546
/** Opcode 0x70.
 * JO rel8 - jump short if overflow (OF=1). */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9564
9565
/** Opcode 0x71.
 * JNO rel8 - jump short if not overflow (OF=0); branch sense inverted below. */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9583
/** Opcode 0x72.
 * JC/JB/JNAE rel8 - jump short if carry (CF=1). */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9601
9602
/** Opcode 0x73.
 * JNC/JNB/JAE rel8 - jump short if not carry (CF=0); branch sense inverted below. */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9620
9621
/** Opcode 0x74.
 * JE/JZ rel8 - jump short if equal/zero (ZF=1). */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9639
9640
/** Opcode 0x75.
 * JNE/JNZ rel8 - jump short if not equal/not zero (ZF=0); branch sense inverted below. */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9658
9659
/** Opcode 0x76.
 * JBE/JNA rel8 - jump short if below or equal (CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9677
9678
/** Opcode 0x77.
 * JA/JNBE rel8 - jump short if above (CF=0 and ZF=0); branch sense inverted below. */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9696
9697
/** Opcode 0x78.
 * JS rel8 - jump short if sign (SF=1). */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9715
9716
/** Opcode 0x79.
 * JNS rel8 - jump short if not sign (SF=0); branch sense inverted below. */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9734
9735
/** Opcode 0x7a.
 * JP/JPE rel8 - jump short if parity even (PF=1). */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9753
9754
/** Opcode 0x7b.
 * JNP/JPO rel8 - jump short if parity odd (PF=0); branch sense inverted below. */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9772
9773
/** Opcode 0x7c.
 * JL/JNGE rel8 - jump short if less (SF != OF). */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9791
9792
/** Opcode 0x7d.
 * JNL/JGE rel8 - jump short if greater or equal (SF == OF); branch sense inverted below. */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9810
9811
/** Opcode 0x7e.
 * JLE/JNG rel8 - jump short if less or equal (ZF=1 or SF != OF). */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9829
9830
/** Opcode 0x7f.
 * JNLE/JG rel8 - jump short if greater (ZF=0 and SF == OF); branch sense inverted below. */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9848
9849
/** Opcode 0x80.
 * Group 1 byte ops with byte immediate: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Eb,Ib.
 * The /reg field of ModR/M selects the operation via g_apIemImplGrp1. */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP only reads the destination, so map it read-only. */
            fAccess = IEM_ACCESS_DATA_R;
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* last arg: one immediate byte still to be fetched after the operand */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        if (pImpl->pfnLockedU8) /* a LOCK prefix is only valid for ops with a locked worker */
            IEMOP_HLP_DONE_DECODING();
        else
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9919
9920
9921/** Opcode 0x81. */
/*
 * Group 1, full-size immediate form: add/or/adc/sbb/and/sub/xor/cmp Ev,Iz.
 * Iz is imm16/imm32 depending on operand size; in 64-bit mode the imm32 is
 * sign-extended to 64 bits (see IEM_OPCODE_GET_NEXT_S32_SX_U64 below).
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The reg field of ModR/M is the opcode extension selecting the operation. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Iz,  "or Ev,Iz"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    /* NOTE(review): no default case; all three IEMMODE values are handled and the
       code falls out of the switch to the common return below. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK only valid with a memory destination */
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                /* CMP only reads the destination, so it is mapped read-only. */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP, TEST */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = imm16 still to be fetched */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();                /* LOCK prefix is permitted here */
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit result zeroes the upper half of the 64-bit GPR */

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP, TEST */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = imm32 still to be fetched */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended to 64 bits */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = imm32 (sign-extended) still to be fetched */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
10108
10109
10110/** Opcode 0x82. */
/*
 * Undocumented alias of opcode 0x80 (Group 1 Eb,Ib); only valid outside
 * 64-bit mode, hence the #UD check before delegating to the 0x80 handler.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
10116
10117
10118/** Opcode 0x83. */
/*
 * Group 1, sign-extended byte immediate form: add/or/adc/sbb/and/sub/xor/cmp
 * Ev,Ib. The imm8 is sign-extended to the effective operand size (see the
 * (int8_t) casts below).
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The reg field of ModR/M is the opcode extension selecting the operation. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Ib,  "or Ev,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
    }
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK only valid with a memory destination */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); /* imm8 sign-extended to 16 bits */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); /* imm8 sign-extended to 32 bits */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit result zeroes the upper half of the 64-bit GPR */

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); /* imm8 sign-extended to 64 bits */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        /* CMP only reads the destination, so it is mapped read-only. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = imm8 still to be fetched */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);         /* sign-extend imm8 */
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();                /* LOCK prefix is permitted here */
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);         /* sign-extend imm8 */
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);         /* sign-extend imm8 */
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
10300
10301
10302/** Opcode 0x84. */
/*
 * test Eb,Gb - byte AND of r/m and reg, flags only (destination not written).
 * Delegates to the common rm,r8 binary-operator worker with the TEST impl.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after TEST */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
10309
10310
10311/** Opcode 0x85. */
/*
 * test Ev,Gv - word/dword/qword AND of r/m and reg, flags only.
 * Delegates to the common rm,rv binary-operator worker with the TEST impl.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after TEST */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
10318
10319
10320/** Opcode 0x86. */
/*
 * xchg Eb,Gb - exchange a byte register with r/m.
 * Register form swaps via two temporaries; memory form maps the memory byte
 * read/write and swaps it with a direct reference to the register.
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK only valid with a memory operand */

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        /* Fetch both, then store crosswise. */
        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg); /* swaps *pu8Mem and *pu8Reg */
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10367
10368
10369/** Opcode 0x87. */
/*
 * xchg Ev,Gv - exchange a word/dword/qword register with r/m.
 * Register form swaps via two temporaries; memory form maps the memory
 * operand read/write and swaps it with a direct register reference.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK only valid with a memory operand */

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                /* Fetch both, then store crosswise. */
                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg); /* swaps the two operands */
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg); /* 32-bit write zeroes the upper half of the 64-bit GPR */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10490
10491
10492/** Opcode 0x88. */
/*
 * mov Eb,Gb - store a byte register into r/m (register or memory).
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK is invalid on MOV */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* 0 = no immediate follows */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
10531
10532
10533/** Opcode 0x89. */
/*
 * mov Ev,Gv - store a word/dword/qword register into r/m.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK is invalid on MOV */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* 0 = no immediate follows */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
10622
10623
10624/** Opcode 0x8a. */
/*
 * mov Gb,Eb - load a byte register from r/m (register or memory).
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK is invalid on MOV */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* 0 = no immediate follows */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10661
10662
10663/** Opcode 0x8b. */
/*
 * mov Gv,Ev - load a word/dword/qword register from r/m.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK is invalid on MOV */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* 0 = no immediate follows */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
10752
10753
10754/** Opcode 0x63. */
/*
 * Mode-dependent dispatcher for opcode 0x63:
 *  - outside 64-bit mode: ARPL Ew,Gw;
 *  - in 64-bit mode with a non-64-bit operand size: plain MOV Gv,Ev;
 *  - in 64-bit mode with 64-bit operand size: MOVSXD Gv,Ev.
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}
10763
10764
10765/** Opcode 0x8c. */
/*
 * mov Ev,Sw - store a segment register into r/m.
 * The reg field selects the segment register (REX.R ignored); the memory
 * form always stores 16 bits regardless of operand-size prefixes.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg); /* zero-extends the selector */
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg); /* zero-extends the selector */
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory. The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* 0 = no immediate follows */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10838
10839
10840
10841
/** Opcode 0x8d.
 *
 * LEA Gv,M - load effective address.  The register form is invalid (\#UD);
 * the calculated address is truncated/zero-extended to the effective operand
 * size before being stored in the destination register. */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc); /* truncate address to 16 bits */
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc); /* truncate address to 32 bits */
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}
10888
10889
/** Opcode 0x8e.
 *
 * MOV Sw,Ev - load a segment register from a general register or memory.
 * CS is not a valid destination (\#UD); the load itself is deferred to the
 * C implementation (iemCImpl_load_SReg) as it may fault/reload descriptors. */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory. The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10944
10945
/** Opcode 0x8f /0.
 *
 * POP Ev.  The memory form is special: Intel documents that rSP is
 * incremented before it is used in the effective-address calculation, so the
 * interpreter path below pops into a temporary rSP copy, stores the value,
 * and only commits rSP when everything succeeded. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    PCPUMCTX        pCtx = IEM_GET_CTX(pVCpu);
    RTGCPTR         GCPtrEff;
    VBOXSTRICTRC    rcStrict;
    /* The last argument is the implicit rSP bias (operand size in bytes). */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    /* Pop via a temporary rSP so nothing is committed if the store faults. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u; /* commit rSP only on full success */
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
11040
11041
/** Opcode 0x8f.
 *
 * Group 1A: /0 is POP Ev; AMD reuses /1 thru /7 as the XOP prefix
 * (a three-byte prefix similar to VEX), which is not decoded yet. */
FNIEMOP_DEF(iemOp_Grp1A)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
    /** @todo XOP decoding. */
    IEMOP_MNEMONIC(xop_amd, "3-byte-xop");
    return IEMOP_RAISE_INVALID_OPCODE();
}
11054
11055
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Used by opcodes 0x90 thru 0x97 (and REX.B forms).  @a iReg is the low three
 * bits of the register index; REX.B is OR'ed in here.  Register-only
 * exchange, so no locking concerns - both registers are read into temporaries
 * and then stored crosswise.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    iReg |= pVCpu->iem.s.uRexB;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg,         u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg,         u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg,         u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11105
11106
11107/** Opcode 0x90. */
11108FNIEMOP_DEF(iemOp_nop)
11109{
11110 /* R8/R8D and RAX/EAX can be exchanged. */
11111 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
11112 {
11113 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
11114 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
11115 }
11116
11117 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
11118 IEMOP_MNEMONIC(pause, "pause");
11119 else
11120 IEMOP_MNEMONIC(nop, "nop");
11121 IEM_MC_BEGIN(0, 0);
11122 IEM_MC_ADVANCE_RIP();
11123 IEM_MC_END();
11124 return VINF_SUCCESS;
11125}
11126
11127
/** Opcode 0x91.  XCHG rCX,rAX (REX.B: r9). */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
11134
11135
/** Opcode 0x92.  XCHG rDX,rAX (REX.B: r10). */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
11142
11143
/** Opcode 0x93.  XCHG rBX,rAX (REX.B: r11). */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
11150
11151
11152/** Opcode 0x94. */
11153FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
11154{
11155 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
11156 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
11157}
11158
11159
/** Opcode 0x95.  XCHG rBP,rAX (REX.B: r13). */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
11166
11167
/** Opcode 0x96.  XCHG rSI,rAX (REX.B: r14). */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
11174
11175
/** Opcode 0x97.  XCHG rDI,rAX (REX.B: r15). */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
11182
11183
/** Opcode 0x98.
 *
 * CBW/CWDE/CDQE - sign-extend the lower half of rAX into the full (effective)
 * width, implemented by testing the top bit of the source half and then
 * OR'ing in / AND'ing away the upper bits. */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {      /* AL sign bit */
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {     /* AX sign bit */
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {     /* EAX sign bit */
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11229
11230
/** Opcode 0x99.
 *
 * CWD/CDQ/CQO - sign-extend rAX into rDX:rAX by filling rDX with the sign
 * bit of rAX (all ones or all zeros). */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {     /* AX sign bit */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {     /* EAX sign bit */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {     /* RAX sign bit */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11276
11277
/** Opcode 0x9a.
 *
 * CALL Ap - far call with immediate selector:offset.  Invalid in 64-bit
 * mode (\#UD); the heavy lifting (stack pushes, mode/privilege checks) is
 * deferred to the C implementation. */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
11294
11295
/** Opcode 0x9b. (aka fwait)
 *
 * WAIT/FWAIT - only raises pending x87 exceptions (\#NM if CR0.TS/MP,
 * \#MF if an unmasked FPU exception is pending); otherwise a no-op. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11309
11310
/** Opcode 0x9c.
 *
 * PUSHF Fv - push EFLAGS; defaults to 64-bit operand size in long mode.
 * Deferred to the C implementation (IOPL/VM86 checks). */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
11318
11319
/** Opcode 0x9d.
 *
 * POPF Fv - pop EFLAGS; defaults to 64-bit operand size in long mode.
 * Deferred to the C implementation (IOPL/privileged-bit filtering). */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
11327
11328
/** Opcode 0x9e.
 *
 * SAHF - store AH into the low byte of EFLAGS (SF,ZF,AF,PF,CF).  In 64-bit
 * mode this requires the LAHF/SAHF CPUID feature bit, otherwise \#UD. */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Keep only the status flags SAHF may modify, force the reserved bit 1,
       and merge with the untouched upper 24 bits of EFLAGS. */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11351
11352
/** Opcode 0x9f.
 *
 * LAHF - load the low byte of EFLAGS into AH.  In 64-bit mode this requires
 * the LAHF/SAHF CPUID feature bit, otherwise \#UD. */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11369
11370
/**
 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
 * prefixes.  Will return on failures.
 *
 * The immediate offset width follows the effective address mode (16/32/64
 * bits), zero-extended to 64 bits.
 *
 * @param a_GCPtrMemOff The variable to store the offset in (RTGCPTR).
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    } while (0)
11395
/** Opcode 0xa0.
 *
 * MOV AL,Ob - load AL from an absolute (moffs) memory address in the
 * effective segment. */
FNIEMOP_DEF(iemOp_mov_Al_Ob)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0,1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11416
11417
/** Opcode 0xa1.
 *
 * MOV rAX,Ov - load rAX (16/32/64-bit per operand size) from an absolute
 * (moffs) memory address in the effective segment. */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11463
11464
/** Opcode 0xa2.
 *
 * MOV Ob,AL - store AL to an absolute (moffs) memory address in the
 * effective segment. */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0,1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11485
11486
/** Opcode 0xa3.
 *
 * MOV Ov,rAX - store rAX (16/32/64-bit per operand size) to an absolute
 * (moffs) memory address in the effective segment. */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11531
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one MOVS iteration: load from DS:rSI (or segment override) of
 * @a ValBits width, store to ES:rDI, then advance/retreat rSI and rDI by the
 * element size depending on EFLAGS.DF.  @a AddrBits selects the address-size
 * truncation of rSI/rDI. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11550
/** Opcode 0xa4.
 *
 * MOVSB - byte string move.  A REP/REPNE prefix routes to the C
 * implementation (which handles the rCX loop); the single-shot case shares
 * IEM_MOVS_CASE with the word/dword/qword variant. */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11584
11585
/** Opcode 0xa5.
 *
 * MOVSW/MOVSD/MOVSQ - word/dword/qword string move.  A REP prefix routes to
 * the C implementation; otherwise a double switch over operand and address
 * size picks the IEM_MOVS_CASE expansion.  A 64-bit operand size with a
 * 16-bit address size cannot be encoded. */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                } /* no break needed: every case above returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11668
11669#undef IEM_MOVS_CASE
11670
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one CMPS iteration: load @a ValBits-wide operands from DS:rSI (or
 * segment override) and ES:rDI, run the CMP arithmetic helper to set EFLAGS,
 * then advance/retreat rSI and rDI by the element size depending on
 * EFLAGS.DF.  @a AddrBits selects the address-size truncation of rSI/rDI. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
11697
/** Opcode 0xa6.
 *
 * CMPSB - byte string compare.  REPE/REPNE prefixes route to the respective
 * C implementations (which handle the rCX loop and ZF termination); the
 * single-shot case shares IEM_CMPS_CASE with the word/dword/qword variant. */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
11743
11744
/**
 * Opcode 0xa7 - cmps Xv,Yv (cmpsw/cmpsd/cmpsq).
 *
 * With a REPE/REPNE prefix the work is deferred to the C implementation
 * (iemCImpl_rep?_cmps_op<opsize>_addr<addrsize>); otherwise IEM_CMPS_CASE
 * (defined above this function) is expanded per operand/address size combo.
 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* No break: every path of the inner switch above returns, so the
                   fall-through into the 64-bit case is unreachable. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4); /* 16-bit addressing cannot be encoded in 64-bit mode. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* No break: unreachable fall-through, all inner cases return. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2); /* 16-bit addressing cannot be encoded in 64-bit mode. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}

#undef IEM_CMPS_CASE
11865
/** Opcode 0xa8 - test AL,Ib.  AF is architecturally undefined after TEST,
 *  hence the verification exclusion. */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
11873
11874
/** Opcode 0xa9 - test rAX,Iz (operand-size dependent: AX/EAX/RAX).  AF is
 *  architecturally undefined after TEST, hence the verification exclusion. */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
11882
11883
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Stores rAX (low ValBits) to ES:rDI and advances rDI by ValBits/8 in the
 * direction given by EFLAGS.DF.
 *
 * Fix: dropped the stray trailing '\' after IEM_MC_END(), which pulled the
 * following blank line into the macro body; now consistent with
 * IEM_LODS_CASE and IEM_SCAS_CASE below. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
11899
11900/** Opcode 0xaa. */
11901FNIEMOP_DEF(iemOp_stosb_Yb_AL)
11902{
11903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11904
11905 /*
11906 * Use the C implementation if a repeat prefix is encountered.
11907 */
11908 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11909 {
11910 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
11911 switch (pVCpu->iem.s.enmEffAddrMode)
11912 {
11913 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
11914 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
11915 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
11916 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11917 }
11918 }
11919 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
11920
11921 /*
11922 * Sharing case implementation with stos[wdq] below.
11923 */
11924 switch (pVCpu->iem.s.enmEffAddrMode)
11925 {
11926 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
11927 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
11928 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
11929 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11930 }
11931 return VINF_SUCCESS;
11932}
11933
11934
/**
 * Opcode 0xab - stos Yv,rAX (stosw/stosd/stosq).
 *
 * REP-prefixed forms are deferred to iemCImpl_stos_<reg>_m<addrsize>;
 * otherwise IEM_STOS_CASE is expanded per operand/address size combo.
 */
FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* No break: unreachable fall-through, all inner cases return. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9); /* 16-bit addressing cannot be encoded in 64-bit mode. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with stosb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}

#undef IEM_STOS_CASE
12019
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Loads ValBits from [iEffSeg:rSI] into rAX (low ValBits) and advances rSI
 * by ValBits/8 in the direction given by EFLAGS.DF. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
12035
/**
 * Opcode 0xac - lods AL,Xb (lodsb).
 *
 * REP-prefixed forms are deferred to iemCImpl_lods_al_m<addrsize>;
 * otherwise IEM_LODS_CASE is expanded per effective address size.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12069
12070
/**
 * Opcode 0xad - lods rAX,Xv (lodsw/lodsd/lodsq).
 *
 * REP-prefixed forms are deferred to iemCImpl_lods_<reg>_m<addrsize>;
 * otherwise IEM_LODS_CASE is expanded per operand/address size combo.
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* No break: unreachable fall-through, all inner cases return. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7); /* 16-bit addressing cannot be encoded in 64-bit mode. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}

#undef IEM_LODS_CASE
12155
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Compares rAX (low ValBits) against [ES:rDI] via the cmp assembly worker
 * (updates EFLAGS only, rAX is untouched) and advances rDI by ValBits/8 in
 * the direction given by EFLAGS.DF. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
12177
12178/** Opcode 0xae. */
12179FNIEMOP_DEF(iemOp_scasb_AL_Xb)
12180{
12181 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12182
12183 /*
12184 * Use the C implementation if a repeat prefix is encountered.
12185 */
12186 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12187 {
12188 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
12189 switch (pVCpu->iem.s.enmEffAddrMode)
12190 {
12191 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
12192 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
12193 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
12194 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12195 }
12196 }
12197 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12198 {
12199 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
12200 switch (pVCpu->iem.s.enmEffAddrMode)
12201 {
12202 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
12203 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
12204 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
12205 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12206 }
12207 }
12208 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
12209
12210 /*
12211 * Sharing case implementation with stos[wdq] below.
12212 */
12213 switch (pVCpu->iem.s.enmEffAddrMode)
12214 {
12215 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
12216 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
12217 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
12218 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12219 }
12220 return VINF_SUCCESS;
12221}
12222
12223
/**
 * Opcode 0xaf - scas rAX,Xv (scasw/scasd/scasq).
 *
 * REPE/REPNE forms are deferred to the respective C implementations;
 * otherwise IEM_SCAS_CASE is expanded per operand/address size combo.
 */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* No break: unreachable fall-through, all inner cases return. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Is this wrong? We can do 32-bit addressing in 64-bit mode (0x67 prefix), but not 16-bit - so the assert looks right; confirm. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* No break: unreachable fall-through, all inner cases return. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5); /* 16-bit addressing cannot be encoded in 64-bit mode. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}

#undef IEM_SCAS_CASE
12341
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the imm8 operand and stores it into the given 8-bit register.
 *
 * @param   iReg    The 8-bit general register index (caller has already
 *                  OR'ed in uRexB where applicable).
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iReg, u8Value);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
12358
12359
/** Opcode 0xb0 - mov AL,Ib (register encoding 0, REX.B extends to R8L). */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
12366
12367
/** Opcode 0xb1 - mov CL,Ib (register encoding 1).
 *  NOTE(review): function name lacks the mov_ prefix used by iemOp_mov_AL_Ib;
 *  left as-is since the opcode table references it by this name. */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
12374
12375
/** Opcode 0xb2 - mov DL,Ib (register encoding 2). */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
12382
12383
/** Opcode 0xb3 - mov BL,Ib (register encoding 3). */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
12390
12391
/** Opcode 0xb4 - mov AH,Ib (register encoding 4 = X86_GREG_xSP; presumably
 *  selects SPL instead of AH when a REX prefix is present - handled by the
 *  GREG_U8 accessors, confirm). */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
12398
12399
/** Opcode 0xb5 - mov CH,Ib (register encoding 5 = X86_GREG_xBP; presumably
 *  selects BPL with a REX prefix - confirm against the GREG_U8 accessors). */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
12406
12407
/** Opcode 0xb6 - mov DH,Ib (register encoding 6 = X86_GREG_xSI; presumably
 *  selects SIL with a REX prefix - confirm against the GREG_U8 accessors). */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
12414
12415
/** Opcode 0xb7 - mov BH,Ib (register encoding 7 = X86_GREG_xDI; presumably
 *  selects DIL with a REX prefix - confirm against the GREG_U8 accessors). */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
12422
12423
/**
 * Common 'mov regX,immX' helper (opcodes 0xb8..0xbf).
 *
 * Fetches an immediate of the current effective operand size (including a
 * full 64-bit immediate in 64-bit mode - the only instruction with one) and
 * stores it into the given general register.
 *
 * @param   iReg    The general register index (caller has already OR'ed in
 *                  uRexB where applicable).
 *
 * NOTE(review): unlike sibling switches in this file, there is no
 * IEM_NOT_REACHED_DEFAULT_CASE_RET() default here; an unexpected enum value
 * would fall through to the VINF_SUCCESS return without doing anything.
 */
FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
            IEM_MC_STORE_GREG_U16(iReg, u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
            IEM_MC_STORE_GREG_U32(iReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
        }
        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
            IEM_MC_STORE_GREG_U64(iReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
        }
    }

    return VINF_SUCCESS;
}
12472
12473
/** Opcode 0xb8 - mov rAX,Iv (register encoding 0, REX.B extends to r8). */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
12480
12481
/** Opcode 0xb9 - mov rCX,Iv (register encoding 1). */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
12488
12489
/** Opcode 0xba - mov rDX,Iv (register encoding 2). */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
12496
12497
/** Opcode 0xbb - mov rBX,Iv (register encoding 3). */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
12504
12505
/** Opcode 0xbc - mov rSP,Iv (register encoding 4). */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
12512
12513
/** Opcode 0xbd - mov rBP,Iv (register encoding 5). */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
12520
12521
/** Opcode 0xbe - mov rSI,Iv (register encoding 6). */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
12528
12529
/** Opcode 0xbf - mov rDI,Iv (register encoding 7). */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
12536
12537
/**
 * Opcode 0xc0 - Group 2 Eb,Ib: rol/ror/rcl/rcr/shl/shr/sar on a byte
 * destination with an imm8 shift count.  /6 is an invalid encoding.
 * Instruction requires a 186 or later (IEMOP_HLP_MIN_186).
 *
 * OF and AF are architecturally undefined for some counts, hence the
 * verification exclusion.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Dispatch on the ModRM reg field (the /n digit). */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The '1' tells the effective-address decoder an imm8 still follows;
           the shift count must be fetched after the ModRM bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12597
12598
/**
 * Opcode 0xc1 - Group 2 Ev,Ib: rol/ror/rcl/rcr/shl/shr/sar on a
 * word/dword/qword destination with an imm8 shift count.  /6 is an invalid
 * encoding.  Instruction requires a 186 or later (IEMOP_HLP_MIN_186).
 *
 * OF and AF are architecturally undefined for some counts, hence the
 * verification exclusion.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Dispatch on the ModRM reg field (the /n digit). */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The '1' tells the effective-address decoder an imm8 still
                   follows; the shift count is fetched after the ModRM bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12738
12739
/** Opcode 0xc2. */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    /* Near return with immediate: pop the return address and then u16Imm extra
       bytes off the stack.  The immediate must be fetched before decoding is
       declared done. */
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near ret defaults to 64-bit operand size in long mode */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
12749
12750
/** Opcode 0xc3. */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    /* Plain near return: same worker as 0xc2 but with zero bytes to pop. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
}
12759
12760
/** Opcode 0xc4.
 * Dual-purpose byte: LES Gv,Mp in legacy/compat mode with a memory operand,
 * otherwise the 2-byte VEX prefix (not implemented here yet). */
FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(vex2_prefix, "2-byte-vex");
        /* The LES instruction is invalid 64-bit mode. In legacy and
           compatability mode it is invalid with MOD=3.
           The use as a VEX prefix is made possible by assigning the inverted
           REX.R to the top MOD bit, and the top bit in the inverted register
           specifier to the bottom MOD bit, thereby effectively limiting 32-bit
           to accessing registers 0..7 in this VEX form. */
        /** @todo VEX: Just use new tables for it. */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    /* Regular LES: load ES:Gv from the far pointer at Mp. */
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
12781
12782
/** Opcode 0xc5.
 * Dual-purpose byte: LDS Gv,Mp in legacy/compat mode with a memory operand,
 * otherwise the 3-byte VEX prefix (currently decoded and rejected). */
FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
{
    /* The LDS instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
            return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
        }
        IEMOP_HLP_NO_REAL_OR_V86_MODE();
    }

    IEMOP_MNEMONIC(vex3_prefix, "3-byte-vex");
    /** @todo Test when exctly the VEX conformance checks kick in during
     *        instruction decoding and fetching (using \#PF). */
    /* Consume the two VEX payload bytes and the real opcode byte so the
       instruction length is accounted for, even though we reject it below. */
    uint8_t bVex1;   IEM_OPCODE_GET_NEXT_U8(&bVex1);
    uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
#if 0 /* will make sense of this next week... */
    if (   !(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
        &&
        )
    {

    }
#endif

    /** @todo VEX: Just use new tables for it. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
12820
12821
/** Opcode 0xc6.
 * Group 11 with byte operands; /0 is mov Eb,Ib, the rest are invalid. */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access.  Note: effective address is calculated before the
           immediate is fetched, matching the instruction byte layout. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12854
12855
/** Opcode 0xc7.
 * Group 11 with word/dword/qword operands; /0 is mov Ev,Iz, rest invalid.
 * In 64-bit mode the immediate is 32-bit sign-extended to 64 bits. */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* 32-bit immediate, sign-extended to 64 bits per the Iz encoding. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access.  The second argument to IEM_MC_CALC_RM_EFF_ADDR is
           the number of immediate bytes that still follow the ModR/M bytes. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12941
12942
12943
12944
/** Opcode 0xc8. */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186(); /* ENTER was introduced with the 80186 */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Iw = stack frame size, Ib = nesting level (masked to 0..31 by the CPU;
       presumably done in iemCImpl_enter -- confirm there). */
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
12956
12957
/** Opcode 0xc9. */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186(); /* LEAVE was introduced with the 80186 */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
12967
12968
/** Opcode 0xca. */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    /* Far return, popping u16Imm extra bytes after the CS:IP pair. */
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
12978
12979
/** Opcode 0xcb. */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    /* Far return without extra pop bytes; shares the 0xca worker. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
12988
12989
/** Opcode 0xcc.
 * int3 breakpoint instruction; flagged as a BP instruction so the \#BP
 * delivery differs from a generic "int 3" (0xcd 0x03). */
FNIEMOP_DEF(iemOp_int_3)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
12996
12997
/** Opcode 0xcd.
 * Software interrupt with an arbitrary vector byte. */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
13005
13006
/** Opcode 0xce. */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT(); /* INTO is invalid in 64-bit mode */

    /* Unconditionally defers to the generic int worker with vector #OF;
       NOTE(review): INTO only traps when EFLAGS.OF is set -- presumably that
       check lives in iemCImpl_int for X86_XCPT_OF, confirm there. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false, 1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13020
13021
/** Opcode 0xcf. */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* All the heavy mode-dependent lifting is done in the CIMPL worker. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
13029
13030
/** Opcode 0xd0.
 * Group 2 shift/rotate of a byte operand by a fixed count of 1:
 * rol/ror/rcl/rcr/shl/shr/sar Eb,1.  The ModR/M reg field selects the
 * operation; /6 is undefined. */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF/AF behavior varies by operation and count; mark them undefined for
       the verification mode comparisons. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the destination read/write and commit afterwards */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13086
13087
13088
/** Opcode 0xd1.
 * Group 2 shift/rotate of a word/dword/qword operand by a fixed count of 1:
 * rol/ror/rcl/rcr/shl/shr/sar Ev,1.  The ModR/M reg field selects the
 * operation; /6 is undefined. */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF/AF are operation/count dependent; undefined for verification mode. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination, one case per effective operand size */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination: map read/write, operate in place, commit */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13220
13221
/** Opcode 0xd2.
 * Group 2 shift/rotate of a byte operand by CL:
 * rol/ror/rcl/rcr/shl/shr/sar Eb,CL.  /6 is undefined. */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF/AF are operation/count dependent; undefined for verification mode. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register: the shift count comes from the CL register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG(uint32_t *,  pEFlags,    2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory: map read/write, shift in place, commit */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13279
13280
/** Opcode 0xd3.
 * Group 2 shift/rotate of a word/dword/qword operand by CL:
 * rol/ror/rcl/rcr/shl/shr/sar Ev,CL.  /6 is undefined. */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF/AF are operation/count dependent; undefined for verification mode. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination, shift count from CL */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination: map read/write, operate in place, commit */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13418
/** Opcode 0xd4. */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* AAM is invalid in 64-bit mode */
    /* AAM divides AL by the immediate; a zero divisor raises #DE. */
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
13430
13431
/** Opcode 0xd5. */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* AAD is invalid in 64-bit mode */
    /* Unlike AAM, AAD multiplies and so a zero immediate is harmless. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
13441
13442
13443/** Opcode 0xd6. */
13444FNIEMOP_DEF(iemOp_salc)
13445{
13446 IEMOP_MNEMONIC(salc, "salc");
13447 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
13448 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
13449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13450 IEMOP_HLP_NO_64BIT();
13451
13452 IEM_MC_BEGIN(0, 0);
13453 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
13454 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
13455 } IEM_MC_ELSE() {
13456 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
13457 } IEM_MC_ENDIF();
13458 IEM_MC_ADVANCE_RIP();
13459 IEM_MC_END();
13460 return VINF_SUCCESS;
13461}
13462
13463
/** Opcode 0xd7.
 * XLAT: AL = [eBX + zero-extended AL], one case per effective address size.
 * The 16/32-bit variants use dedicated fetch macros (IEM_MC_FETCH_MEM16_U8 /
 * IEM_MC_FETCH_MEM32_U8), presumably to get the right address truncation
 * semantics -- confirm against their definitions. */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13510
13511
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * @param   bRm         The ModR/M byte; the R/M field selects the STn register.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Only call the worker when both ST0 and STn hold values; otherwise
       signal a stack underflow targeting ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13542
13543
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags (FSW); no stack register is written.
 *
 * @param   bRm         The ModR/M byte; the R/M field selects the STn register.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* UINT8_MAX = no destination register for the underflow case. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13574
13575
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags (FSW), and popping the stack when done.
 *
 * @param   bRm         The ModR/M byte; the R/M field selects the STn register.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* The stack is popped on both the normal and the underflow path. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13606
13607
/** Opcode 0xd8 11/0. FADD ST(0),ST(i) - delegates to the binary ST0/STn worker. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xd8 11/1. FMUL ST(0),ST(i) - delegates to the binary ST0/StN worker. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xd8 11/2. FCOM ST(0),ST(i) - flags only, no result stored. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/3. FCOMP ST(0),ST(i) - same assembly worker as FCOM, but pops. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/4. FSUB ST(0),ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xd8 11/5. FSUBR ST(0),ST(i) - reversed operand order. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xd8 11/6. FDIV ST(0),ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}


/** Opcode 0xd8 11/7. FDIVR ST(0),ST(i) - reversed operand order. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
13670
13671
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * Fetches the 32-bit real operand from the effective address, applies the
 * assembly worker to (ST0, m32r) and stores the result back in ST0; on an
 * empty ST0 a stack underflow is raised with ST0 as the target.
 *
 * @param bRm       The ModR/M byte (memory form).
 * @param pfnAImpl  Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13707
13708
/** Opcode 0xd8 !11/0. FADD ST(0),m32real - delegates to the ST0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}


/** Opcode 0xd8 !11/1. FMUL ST(0),m32real - delegates to the ST0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
13723
13724
/** Opcode 0xd8 !11/2. FCOM ST(0),m32real - compare only, FSW update, no store. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* Memory-operand variant so FDP/FDS get recorded along with the FSW update. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13757
13758
/** Opcode 0xd8 !11/3. FCOMP ST(0),m32real - same as FCOM m32r, but pops ST0. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13791
13792
/** Opcode 0xd8 !11/4. FSUB ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5. FSUBR ST(0),m32real - reversed operand order. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6. FDIV ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7. FDIVR ST(0),m32real - reversed operand order. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
13823
13824
/** Opcode 0xd8.
 * Escape opcode dispatcher: mod=11 selects the ST0/STn register forms,
 * otherwise the ST0/m32real memory forms; reg bits pick the operation. */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of 0xd8 + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register operand forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory (m32real) operand forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13862
13863
/** Opcode 0xd9 /0 mem32real
 * FLD m32real: convert the 32-bit real to 80-bit and push it onto the stack.
 * Register 7 (the new top after the push) must be empty, else push overflow.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) /* ST7 becomes the new ST0 after the push. */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13896
13897
/** Opcode 0xd9 !11/2 mem32real
 * FST m32real: store ST0 to memory as 32-bit real. On an empty ST0, a
 * negative QNaN is written when the invalid-operation exception is masked
 * (FCW.IM), matching hardware underflow behaviour. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on the FSW (unmasked exceptions suppress the store). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13932
13933
/** Opcode 0xd9 !11/3
 * FSTP m32real: like FST m32real, but pops the stack after the FSW update
 * (and on the underflow path). */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            /* Masked invalid-operation: write default negative QNaN, still pop. */
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13968
13969
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte: load the FPU environment; the 14 vs 28 byte layout is
 * decided by the effective operand size, so that is passed to the C worker. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13987
13988
13989/** Opcode 0xd9 !11/5 */
13990FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
13991{
13992 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
13993 IEM_MC_BEGIN(1, 1);
13994 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13995 IEM_MC_ARG(uint16_t, u16Fsw, 0);
13996 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13998 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13999 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14000 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14001 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
14002 IEM_MC_END();
14003 return VINF_SUCCESS;
14004}
14005
14006
/** Opcode 0xd9 !11/6
 * FNSTENV m14/28byte: store the FPU environment; layout chosen by the
 * effective operand size, so that is passed to the C worker. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ(); /* store only reads the FPU state */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
14024
14025
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte: store the FPU control word to memory. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
14043
14044
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP: no arithmetic effect, but still raises \#NM/\#MF and updates FOP/FPUIP. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     * intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
14062
14063
/** Opcode 0xd9 11/0 stN
 * FLD ST(i): push a copy of ST(i) onto the stack; underflow if ST(i) empty. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value); /* plain copy, no FSW bits set */
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
14091
14092
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i): swap ST0 and ST(i); empty registers are handled by a C worker. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Swap: ST(i)'s value goes to ST0 (via FpuRes), ST0's value to ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        /* One or both registers empty: delegate underflow handling to C code. */
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
14123
14124
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i): copy ST0 to ST(i) and pop. The i==0 case reduces to a plain
 * pop and is special-cased to avoid the redundant self-copy. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw); /* just pop; no value movement needed */
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
14171
14172
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * Applies the assembly worker to ST0 and stores the result back in ST0; on an
 * empty ST0 a stack underflow targeting ST0 is raised instead.
 *
 * @param pfnAImpl  Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14202
14203
/** Opcode 0xd9 0xe0. FCHS - negate ST0; delegates to the unary ST0 worker. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1. FABS - absolute value of ST0; delegates to the unary ST0 worker. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
14218
14219
/**
 * Common worker for FPU instructions working on ST0 and only returns FSW.
 *
 * Applies the assembly worker to ST0 and merges the returned FSW into the
 * guest FPU state; no register is written. Underflow is raised if ST0 is
 * empty.
 *
 * @param pfnAImpl  Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX); /* UINT8_MAX: no result register (flags-only instruction). */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14248
14249
/** Opcode 0xd9 0xe4. FTST - compare ST0 against 0.0; flags only. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}


/** Opcode 0xd9 0xe5. FXAM - classify ST0; flags only. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
14264
14265
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * The assembly worker produces the constant; it is pushed if the register
 * that will become the new top (ST7) is free, else a push overflow is raised.
 *
 * @param pfnAImpl  Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) /* ST7 becomes the new ST0 after the push. */
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14293
14294
/** Opcode 0xd9 0xe8. FLD1 - push +1.0; delegates to the push-constant worker. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9. FLDL2T - push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea. FLDL2E - push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb. FLDPI - push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec. FLDLG2 - push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed. FLDLN2 - push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee. FLDZ - push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
14347
14348
/** Opcode 0xd9 0xf0. F2XM1 - compute 2^ST0 - 1 in place; unary ST0 worker. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
14355
14356
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * Applies the assembly worker to (ST(bRm & 7), ST0), stores the result in STn
 * and pops; on empty registers the underflow path also targets STn.
 *
 * @param bRm       Low three bits select STn (callers may also pass a literal index).
 * @param pfnAImpl  Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14388
14389
/** Opcode 0xd9 0xf1. FYL2X - st1 := st1 * log2(st0), then pop (st1 index passed as literal 1). */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
14396
14397
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * The assembly worker returns two values (IEMFPURESULTTWO); both are pushed as
 * a pair. An empty ST0 raises the two-result push-underflow path.
 *
 * @param pfnAImpl  Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14427
14428
/** Opcode 0xd9 0xf2. FPTAN - two-output worker (replaces ST0, pushes second result). */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}


/** Opcode 0xd9 0xf3. FPATAN - result stored in ST1, then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4. FXTRACT - two-output worker (exponent replaces ST0, significand pushed). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5. FPREM1 - result stored in ST0, no pop. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
14459
14460
/** Opcode 0xd9 0xf6.
 * FDECSTP - decrement the FPU stack top pointer; no tag/register changes. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears C0-C3 per the note above */

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14483
14484
/** Opcode 0xd9 0xf7.
 * FINCSTP - increment the FPU stack top pointer; no tag/register changes. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears C0-C3 per the note above */

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14507
14508
/** Opcode 0xd9 0xf8. FPREM - result stored in ST0, no pop. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}


/** Opcode 0xd9 0xf9. FYL2XP1 - result stored in ST1, then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}


/** Opcode 0xd9 0xfa. FSQRT - unary ST0 worker. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}


/** Opcode 0xd9 0xfb. FSINCOS - two-output worker (replaces ST0, pushes second result). */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}


/** Opcode 0xd9 0xfc. FRNDINT - unary ST0 worker. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}


/** Opcode 0xd9 0xfd. FSCALE - result stored in ST0, no pop. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}


/** Opcode 0xd9 0xfe. FSIN - unary ST0 worker. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}


/** Opcode 0xd9 0xff. FCOS - unary ST0 worker. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
14571
14572
/**
 * Dispatch table used by iemOp_EscF1 for the register form of 0xd9 with
 * reg fields 4 thru 7, i.e. ModR/M bytes 0xe0 thru 0xff.  Indexed by
 * (bRm - 0xe0); must therefore stay exactly 32 entries long.
 */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
14609
14610
/**
 * Opcode 0xd9 - x87 escape group F1.
 *
 * Decodes the ModR/M byte and dispatches to the individual FPU instruction
 * workers.  Register forms (mod == 3) go by reg field, with reg 4..7 routed
 * through the g_apfnEscF1_E0toFF table; memory forms decode m32r and
 * environment/control-word accesses.
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (low 3 opcode bits + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                /* Only 0xd0 (FNOP) is defined in this reg group. */
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* reg 4..7 means bRm is 0xe0..0xff; table-dispatch on (bRm - 0xe0). */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r,  bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv,    bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw,     bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv,   bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw,    bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14653
14654
/** Opcode 0xda 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    /* FCMOVB: copy ST(i) to ST(0) when CF is set (below); stack underflow
       is signalled if either register is empty. */
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FPUIP are updated regardless of whether the move happened. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14681
14682
/** Opcode 0xda 11/1. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    /* FCMOVE: copy ST(i) to ST(0) when ZF is set (equal). */
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FPUIP are updated regardless of whether the move happened. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14709
14710
/** Opcode 0xda 11/2. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    /* FCMOVBE: copy ST(i) to ST(0) when CF or ZF is set (below or equal). */
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FPUIP are updated regardless of whether the move happened. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14737
14738
/** Opcode 0xda 11/3. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    /* FCMOVU: copy ST(i) to ST(0) when PF is set (unordered result). */
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FPUIP are updated regardless of whether the move happened. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14765
14766
14767/**
14768 * Common worker for FPU instructions working on ST0 and STn, only affecting
14769 * flags, and popping twice when done.
14770 *
14771 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14772 */
14773FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14774{
14775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14776
14777 IEM_MC_BEGIN(3, 1);
14778 IEM_MC_LOCAL(uint16_t, u16Fsw);
14779 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14780 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14781 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14782
14783 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14784 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14785
14786 IEM_MC_PREPARE_FPU_USAGE();
14787 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
14788 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14789 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
14790 IEM_MC_ELSE()
14791 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
14792 IEM_MC_ENDIF();
14793 IEM_MC_ADVANCE_RIP();
14794
14795 IEM_MC_END();
14796 return VINF_SUCCESS;
14797}
14798
14799
/** Opcode 0xda 0xe9. */
FNIEMOP_DEF(iemOp_fucompp)
{
    /* FUCOMPP: unordered compare of ST(0) with ST(1), then pop both. */
    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
14806
14807
14808/**
14809 * Common worker for FPU instructions working on ST0 and an m32i, and storing
14810 * the result in ST0.
14811 *
14812 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14813 */
14814FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
14815{
14816 IEM_MC_BEGIN(3, 3);
14817 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14818 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14819 IEM_MC_LOCAL(int32_t, i32Val2);
14820 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14821 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14822 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
14823
14824 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14826
14827 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14828 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14829 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14830
14831 IEM_MC_PREPARE_FPU_USAGE();
14832 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14833 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
14834 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14835 IEM_MC_ELSE()
14836 IEM_MC_FPU_STACK_UNDERFLOW(0);
14837 IEM_MC_ENDIF();
14838 IEM_MC_ADVANCE_RIP();
14839
14840 IEM_MC_END();
14841 return VINF_SUCCESS;
14842}
14843
14844
/** Opcode 0xda !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    /* FIADD m32int: ST(0) += (long double)m32i. */
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
14851
14852
/** Opcode 0xda !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    /* FIMUL m32int: ST(0) *= (long double)m32i. */
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
14859
14860
/** Opcode 0xda !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    /* FICOM m32int: compare ST(0) with the integer operand, update FSW only. */
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* Memory-operand variant of the FSW update so FPUDP/FPUDS get set. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14893
14894
/** Opcode 0xda !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    /* FICOMP m32int: same as FICOM m32int but pops ST(0) afterwards. */
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* Same compare worker as FICOM; only the pop differs. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14927
14928
/** Opcode 0xda !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    /* FISUB m32int: ST(0) -= (long double)m32i. */
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
14935
14936
/** Opcode 0xda !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    /* FISUBR m32int: ST(0) = (long double)m32i - ST(0) (reversed subtract). */
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
14943
14944
/** Opcode 0xda !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    /* FIDIV m32int: ST(0) /= (long double)m32i. */
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
14951
14952
/** Opcode 0xda !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    /* FIDIVR m32int: ST(0) = (long double)m32i / ST(0) (reversed divide). */
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
14959
14960
/**
 * Opcode 0xda - x87 escape group F2.
 *
 * Register forms are the FCMOVcc family plus FUCOMPP (0xe9); memory forms
 * are the 32-bit integer arithmetic/compare instructions.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (low 3 opcode bits + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                /* Only 0xe9 (FUCOMPP) is defined in this reg group. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15000
15001
/** Opcode 0xdb !11/0. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    /* FILD m32int: convert the 32-bit integer to 80-bit real and push it. */
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val,  i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 is the one that becomes the new top after the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15033
15034
/** Opcode 0xdb !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    /* FISTTP m32int (SSE3): store ST(0) as int32 with truncation, then pop. */
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination up front so memory faults precede FPU changes. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty register: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15069
15070
/** Opcode 0xdb !11/2. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    /* FIST m32int: store ST(0) as int32 (rounded per FCW), no pop. */
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination up front so memory faults precede FPU changes. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty register: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15105
15106
/** Opcode 0xdb !11/3. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    /* FISTP m32int: store ST(0) as int32 (rounded per FCW), then pop. */
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination up front so memory faults precede FPU changes. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty register: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15141
15142
/** Opcode 0xdb !11/5. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    /* FLD m80real: push the 80-bit real memory operand onto the stack. */
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U,            r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U,  pr80Val,    r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 is the one that becomes the new top after the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15174
15175
/** Opcode 0xdb !11/7. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    /* FSTP m80real: store ST(0) to the 80-bit memory operand, then pop. */
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U,             pr80Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination up front so memory faults precede FPU changes. */
    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty register: with IM masked, store the real indefinite (-QNaN). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15210
15211
/** Opcode 0xdb 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    /* FCMOVNB: copy ST(i) to ST(0) when CF is clear (not below). */
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FPUIP are updated regardless of whether the move happened. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15238
15239
/** Opcode 0xdb 11/1. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    /* FCMOVNE: copy ST(i) to ST(0) when ZF is clear (not equal). */
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FPUIP are updated regardless of whether the move happened. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15266
15267
/** Opcode 0xdb 11/2. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    /* FCMOVNBE: copy ST(i) to ST(0) when both CF and ZF are clear (not
       below or equal). */
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FPUIP are updated regardless of whether the move happened. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15294
15295
/** Opcode 0xdb 11/3. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    /* FCMOVNU: copy ST(i) to ST(0) when PF is clear (not unordered). */
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FPUIP are updated regardless of whether the move happened. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15322
15323
/** Opcode 0xdb 0xe0. */
FNIEMOP_DEF(iemOp_fneni)
{
    /* FNENI: 8087 interrupt-enable instruction; a no-op (beyond the #NM
       check) on later FPUs, which is what is emulated here. */
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15335
15336
/** Opcode 0xdb 0xe1. */
FNIEMOP_DEF(iemOp_fndisi)
{
    /* FNDISI: 8087 interrupt-disable instruction; a no-op (beyond the #NM
       check) on later FPUs, which is what is emulated here. */
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15348
15349
/** Opcode 0xdb 0xe2. */
FNIEMOP_DEF(iemOp_fnclex)
{
    /* FNCLEX: clear the FPU exception flags in FSW without checking for
       pending exceptions first (the "no-wait" form). */
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15364
15365
/** Opcode 0xdb 0xe3. */
FNIEMOP_DEF(iemOp_fninit)
{
    /* FNINIT: reinitialize the FPU; deferred to the C implementation with
       exception checking disabled (no-wait form). */
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
15373
15374
/** Opcode 0xdb 0xe4. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    /* FNSETPM: 80287 "set protected mode"; ignored (beyond the #NM check)
       on later FPUs, which is what is emulated here. */
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15386
15387
/** Opcode 0xdb 0xe5. */
FNIEMOP_DEF(iemOp_frstpm)
{
    /* FRSTPM: 80287XL "reset protected mode".  Newer CPUs raise #UD for
       this encoding, so the ignore-variant is compiled out. */
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
15403
15404
/** Opcode 0xdb 11/5. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    /* FUCOMI: unordered compare ST(0) with ST(i), setting EFLAGS; no pop. */
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
15411
15412
/** Opcode 0xdb 11/6. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    /* FCOMI: ordered compare ST(0) with ST(i), setting EFLAGS; no pop. */
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
15419
15420
/**
 * Opcode 0xdb - x87 escape group F3.
 *
 * Register forms cover the FCMOVNcc family, the administrative 0xe0..0xe7
 * encodings (FNCLEX, FNINIT, legacy 8087/80287 instructions) and
 * FUCOMI/FCOMI; memory forms cover 32-bit integer loads/stores and the
 * 80-bit real load/store.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (low 3 opcode bits + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* The administrative / legacy no-wait instructions. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN,  bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15470
15471
15472/**
15473 * Common worker for FPU instructions working on STn and ST0, and storing the
15474 * result in STn unless IE, DE or ZE was raised.
15475 *
15476 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15477 */
15478FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
15479{
15480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15481
15482 IEM_MC_BEGIN(3, 1);
15483 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15484 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15485 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15486 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
15487
15488 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15489 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15490
15491 IEM_MC_PREPARE_FPU_USAGE();
15492 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
15493 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
15494 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
15495 IEM_MC_ELSE()
15496 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
15497 IEM_MC_ENDIF();
15498 IEM_MC_ADVANCE_RIP();
15499
15500 IEM_MC_END();
15501 return VINF_SUCCESS;
15502}
15503
15504
/** Opcode 0xdc 11/0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    /* FADD ST(i),ST(0): result stored in ST(i). */
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
15511
15512
/** Opcode 0xdc 11/1. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    /* FMUL ST(i),ST(0): result stored in ST(i). */
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
15519
15520
15521/** Opcode 0xdc 11/4. */
15522FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
15523{
15524 IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
15525 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
15526}
15527
15528
15529/** Opcode 0xdc 11/5. */
15530FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
15531{
15532 IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
15533 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
15534}
15535
15536
15537/** Opcode 0xdc 11/6. */
15538FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
15539{
15540 IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
15541 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
15542}
15543
15544
15545/** Opcode 0xdc 11/7. */
15546FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
15547{
15548 IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
15549 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
15550}
15551
15552
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * @param   bRm     The ModR/M byte (memory form, used for effective address).
 * @param   pfnImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Operate only when ST0 holds a value; otherwise record stack underflow
       together with the memory operand info (DS:ptr) for FDP/FDS. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15587
15588
/** Opcode 0xdc !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}


/** Opcode 0xdc !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
15603
15604
/** Opcode 0xdc !11/2.
 * Compares ST0 with a 64-bit memory operand; only FSW is updated, no
 * result register is written. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* ST0 empty: stack underflow; no register to blame, hence UINT8_MAX. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15637
15638
/** Opcode 0xdc !11/3.
 * Same as FCOM m64r but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15671
15672
/** Opcode 0xdc !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}


/** Opcode 0xdc !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}


/** Opcode 0xdc !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}


/** Opcode 0xdc !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
15703
15704
15705/** Opcode 0xdc. */
15706FNIEMOP_DEF(iemOp_EscF4)
15707{
15708 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15709 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
15710 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15711 {
15712 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15713 {
15714 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
15715 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
15716 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
15717 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
15718 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
15719 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
15720 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
15721 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
15722 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15723 }
15724 }
15725 else
15726 {
15727 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15728 {
15729 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
15730 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
15731 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
15732 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
15733 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
15734 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
15735 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
15736 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
15737 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15738 }
15739 }
15740}
15741
15742
/** Opcode 0xdd !11/0.
 * Pushes a 64-bit floating point memory operand onto the FPU stack as r80.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* ST7 (i.e. TOP-1) must be free for the push to succeed. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15774
15775
/** Opcode 0xdd !11/1.
 * Stores ST0 to m64i with truncation and pops the stack. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with #IA masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15810
15811
/** Opcode 0xdd !11/2.
 * Stores ST0 to m64r without popping. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with #IA masked, store negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15846
15847
15848
15849
/** Opcode 0xdd !11/3.
 * Same as FST m64r but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with #IA masked, store negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15884
15885
/** Opcode 0xdd !11/4.
 * Restores the full FPU state from memory; deferred to a C implementation. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
15903
15904
/** Opcode 0xdd !11/6.
 * Saves the full FPU state to memory; deferred to a C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
15923
/** Opcode 0xdd !11/7.
 * Stores the FPU status word to m16; does not check for pending FPU
 * exceptions (the "no-wait" form). */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
15948
15949
/** Opcode 0xdd 11/0.
 * Marks ST(i) as empty without changing TOP. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15971
15972
/** Opcode 0xdd 11/2.
 * Copies ST0 to ST(i) without popping. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Wrap the ST0 value into a result with a zero FSW delta and store it. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15997
15998
/** Opcode 0xdd 11/4. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}


/** Opcode 0xdd 11/5. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
16013
16014
16015/** Opcode 0xdd. */
16016FNIEMOP_DEF(iemOp_EscF5)
16017{
16018 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16019 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
16020 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16021 {
16022 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16023 {
16024 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
16025 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
16026 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
16027 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
16028 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
16029 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
16030 case 6: return IEMOP_RAISE_INVALID_OPCODE();
16031 case 7: return IEMOP_RAISE_INVALID_OPCODE();
16032 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16033 }
16034 }
16035 else
16036 {
16037 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16038 {
16039 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
16040 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
16041 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
16042 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
16043 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
16044 case 5: return IEMOP_RAISE_INVALID_OPCODE();
16045 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
16046 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
16047 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16048 }
16049 }
16050}
16051
16052
/** Opcode 0xde 11/0. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xde 11/1. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
16067
16068
/** Opcode 0xde 0xd9.
 * Compares ST0 with ST1 and pops the stack twice. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xde 11/4. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xde 11/5. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xde 11/6. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xde 11/7. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
16107
16108
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte (memory form, used for effective address).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Operate only when ST0 holds a value; otherwise record stack underflow. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16144
16145
/** Opcode 0xde !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}


/** Opcode 0xde !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
16160
16161
/** Opcode 0xde !11/2.
 * Compares ST0 with a 16-bit integer memory operand; FSW only, no store. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16194
16195
/** Opcode 0xde !11/3.
 * Same as FICOM m16i but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16228
16229
/** Opcode 0xde !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}


/** Opcode 0xde !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}


/** Opcode 0xde !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}


/** Opcode 0xde !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
16260
16261
16262/** Opcode 0xde. */
16263FNIEMOP_DEF(iemOp_EscF6)
16264{
16265 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16266 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
16267 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16268 {
16269 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16270 {
16271 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
16272 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
16273 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
16274 case 3: if (bRm == 0xd9)
16275 return FNIEMOP_CALL(iemOp_fcompp);
16276 return IEMOP_RAISE_INVALID_OPCODE();
16277 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
16278 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
16279 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
16280 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
16281 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16282 }
16283 }
16284 else
16285 {
16286 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16287 {
16288 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
16289 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
16290 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
16291 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
16292 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
16293 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
16294 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
16295 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
16296 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16297 }
16298 }
16299}
16300
16301
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();   /* The 'p' part: pop by incrementing TOP. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16323
16324
/** Opcode 0xdf 0xe0.
 * Stores the FPU status word in AX; "no-wait" form, so no FPU exception check. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16341
16342
/** Opcode 0xdf 11/5. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    /* NOTE(review): This passes the same ordered-compare worker
       (iemAImpl_fcomi_r80_by_r80) as FCOMIP below.  Architecturally FUCOMIP
       differs from FCOMIP only in not raising #IA for QNaN operands —
       confirm sharing the worker is intentional. */
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}


/** Opcode 0xdf 11/6. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
16357
16358
/** Opcode 0xdf !11/0.
 * Pushes a 16-bit signed integer memory operand onto the FPU stack as r80. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST7 (i.e. TOP-1) must be free for the push to succeed. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16390
16391
/** Opcode 0xdf !11/1.
 * Stores ST0 to m16i with truncation and pops the stack. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with #IA masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16426
16427
16428/** Opcode 0xdf !11/2. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    /*
     * FIST m16i: store ST(0) to a 16-bit signed integer in memory using the
     * current rounding mode.  Unlike fistp/fisttp this does NOT pop the stack
     * (see IEM_MC_UPDATE_FSW_WITH_MEM_OP below, no _THEN_POP).
     */
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    /* Decode the effective address; lock prefix is invalid for this encoding. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before using the FPU. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Normal path: convert, commit the store, update FSW (no pop). */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: store integer indefinite if #IA is masked, then
           signal stack underflow (no pop). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16462
16463
16464/** Opcode 0xdf !11/3. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    /*
     * FISTP m16i: store ST(0) to a 16-bit signed integer in memory using the
     * current rounding mode, then pop the FPU stack.  Identical to fist_m16i
     * above except for the _THEN_POP variants of the FSW/underflow updates.
     */
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    /* Decode the effective address; lock prefix is invalid for this encoding. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before using the FPU. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Normal path: convert, commit the store, update FSW and pop ST(0). */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: store integer indefinite if #IA is masked, then
           signal stack underflow and pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16498
16499
16500/** Opcode 0xdf !11/4. */
/* FBLD m80bcd (load 80-bit packed BCD) is not implemented yet — stubbed. */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
16502
16503
16504/** Opcode 0xdf !11/5. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    /*
     * FILD m64i: load a 64-bit signed integer from memory, convert it to an
     * 80-bit real and push it onto the FPU stack.
     */
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    /* Decode the effective address; lock prefix is invalid here. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the source operand before touching the FPU state. */
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push lands in ST(7) of the current stack frame; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* Destination slot occupied: FPU stack overflow. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16535
16536
16537/** Opcode 0xdf !11/6. */
/* FBSTP m80bcd (store 80-bit packed BCD and pop) is not implemented yet — stubbed. */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
16539
16540
16541/** Opcode 0xdf !11/7. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    /*
     * FISTP m64i: store ST(0) to a 64-bit signed integer in memory using the
     * current rounding mode, then pop the FPU stack.  64-bit sibling of
     * fistp_m16i above.
     */
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    /* Decode the effective address; lock prefix is invalid here. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before using the FPU. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Normal path: convert, commit the store, update FSW and pop ST(0). */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: store integer indefinite if #IA is masked, then
           signal stack underflow and pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16575
16576
16577/** Opcode 0xdf. */
FNIEMOP_DEF(iemOp_EscF7)
{
    /*
     * Escape opcode 0xdf: dispatch on the ModR/M byte.  Mod == 3 selects the
     * register forms, anything else selects the memory forms; the reg field
     * (bits 5:3) picks the sub-instruction in both tables.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* fnstsw is only valid as 'fnstsw ax' (df e0). */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16614
16615
16616/** Opcode 0xe0. */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    /*
     * LOOPNE/LOOPNZ Jb: decrement the counter register (CX/ECX/RCX, selected
     * by the effective address size) and branch by the signed 8-bit
     * displacement if the counter is non-zero AND ZF is clear.  The counter
     * is decremented unconditionally; EFLAGS are never modified.
     */
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16662
16663
16664/** Opcode 0xe1. */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    /*
     * LOOPE/LOOPZ Jb: decrement the counter register (CX/ECX/RCX, selected by
     * the effective address size) and branch by the signed 8-bit displacement
     * if the counter is non-zero AND ZF is set.  Mirror image of loopne above.
     */
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16710
16711
16712/** Opcode 0xe2. */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    /*
     * LOOP Jb: decrement the counter register (CX/ECX/RCX, selected by the
     * effective address size) and branch if it's still non-zero.
     *
     * A displacement equal to -(instruction length) means the jump targets
     * the instruction itself ('loop $'), i.e. a delay loop that only counts
     * the register down.  That case is short-circuited below by clearing the
     * counter and advancing RIP in one go instead of iterating.
     */
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm) /* not 'loop $' */
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* 'loop $' shortcut: skip the whole countdown. */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm) /* not 'loop $' */
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* 'loop $' shortcut: skip the whole countdown. */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm) /* not 'loop $' */
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* 'loop $' shortcut: skip the whole countdown. */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16785
16786
16787/** Opcode 0xe3. */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    /*
     * JCXZ/JECXZ/JRCXZ Jb: branch by the signed 8-bit displacement if the
     * counter register (selected by the effective address size) is zero.
     * Unlike the LOOP family the counter is only tested, never decremented.
     * Note the inverted structure: the IF_*_IS_NZ branch falls through, the
     * ELSE branch takes the jump.
     */
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16830
16831
16832/** Opcode 0xe4 */
16833FNIEMOP_DEF(iemOp_in_AL_Ib)
16834{
16835 IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
16836 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16837 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16838 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
16839}
16840
16841
16842/** Opcode 0xe5 */
16843FNIEMOP_DEF(iemOp_in_eAX_Ib)
16844{
16845 IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
16846 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16847 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16848 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
16849}
16850
16851
16852/** Opcode 0xe6 */
16853FNIEMOP_DEF(iemOp_out_Ib_AL)
16854{
16855 IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
16856 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16857 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16858 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
16859}
16860
16861
16862/** Opcode 0xe7 */
16863FNIEMOP_DEF(iemOp_out_Ib_eAX)
16864{
16865 IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
16866 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16868 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
16869}
16870
16871
16872/** Opcode 0xe8. */
FNIEMOP_DEF(iemOp_call_Jv)
{
    /*
     * CALL Jv: near relative call.  The immediate width follows the effective
     * operand size; in 64-bit mode a 32-bit immediate is fetched and
     * sign-extended to 64 bits (IEM_OPCODE_GET_NEXT_S32_SX_U64).  The actual
     * push/branch work is deferred to the C implementation.
     */
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16900
16901
16902/** Opcode 0xe9. */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    /*
     * JMP Jv: near relative jump.  16-bit operand size uses a 16-bit
     * displacement; 32-bit and 64-bit modes share the 32-bit displacement
     * path (in 64-bit mode the displacement is still 32 bits wide, applied
     * sign-extended by IEM_MC_REL_JMP_S32).
     */
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16931
16932
16933/** Opcode 0xea. */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    /*
     * JMP Ap: direct far jump with an immediate seg:off pointer.  This
     * encoding is invalid in 64-bit mode (IEMOP_HLP_NO_64BIT).  The offset is
     * 16 or 32 bits depending on the operand size; the 16-bit form is
     * zero-extended so a single uint32_t can carry both.
     */
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
16949
16950
16951/** Opcode 0xeb. */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    /* JMP Jb: short jump with a signed 8-bit displacement. */
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
16964
16965
16966/** Opcode 0xec */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    /* IN AL,DX: read one byte from the I/O port in DX; deferred to C code. */
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
16973
16974
16975/** Opcode 0xed */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    /* IN eAX,DX: read a word/dword (by operand size) from the I/O port in DX.
       NOTE(review): the function name is missing the 'in_' prefix (cf.
       iemOp_in_AL_DX); renaming would require touching the opcode table, so
       it is only flagged here. */
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16982
16983
16984/** Opcode 0xee */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    /* OUT DX,AL: write AL to the I/O port in DX; deferred to C code. */
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
16991
16992
16993/** Opcode 0xef */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    /* OUT DX,eAX: write AX/EAX (by operand size) to the I/O port in DX. */
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
17000
17001
17002/** Opcode 0xf0. */
17003FNIEMOP_DEF(iemOp_lock)
17004{
17005 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
17006 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
17007
17008 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17009 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17010}
17011
17012
17013/** Opcode 0xf1. */
FNIEMOP_DEF(iemOp_int_1)
{
    /* INT1/ICEBP: raise a #DB, treated here as a non-BP software interrupt. */
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
17021
17022
17023/** Opcode 0xf2. */
17024FNIEMOP_DEF(iemOp_repne)
17025{
17026 /* This overrides any previous REPE prefix. */
17027 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
17028 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
17029 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
17030
17031 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17032 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17033}
17034
17035
17036/** Opcode 0xf3. */
17037FNIEMOP_DEF(iemOp_repe)
17038{
17039 /* This overrides any previous REPNE prefix. */
17040 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
17041 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
17042 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
17043
17044 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17045 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17046}
17047
17048
17049/** Opcode 0xf4. */
FNIEMOP_DEF(iemOp_hlt)
{
    /* HLT: halt the CPU; privilege checks etc. live in the C implementation. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
17055
17056
17057/** Opcode 0xf5. */
FNIEMOP_DEF(iemOp_cmc)
{
    /* CMC: complement (flip) the carry flag; no other flags are touched. */
    IEMOP_MNEMONIC(cmc, "cmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17068
17069
17070/**
17071 * Common implementation of 'inc/dec/not/neg Eb'.
17072 *
17073 * @param bRm The RM byte.
17074 * @param pImpl The instruction implementation.
17075 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /*
     * Shared worker for the byte-sized unary group instructions
     * (inc/dec/not/neg Eb).  The concrete operation is supplied via pImpl;
     * for memory operands a LOCK prefix selects the locked assembly variant.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access: map read-write, operate in place, commit. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* LOCK prefix selects the atomic implementation of the operation. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
17113
17114
17115/**
17116 * Common implementation of 'inc/dec/not/neg Ev'.
17117 *
17118 * @param bRm The RM byte.
17119 * @param pImpl The instruction implementation.
17120 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /*
     * Shared worker for the word/dword/qword unary group instructions
     * (inc/dec/not/neg Ev).  The concrete operation is supplied via pImpl.
     * Each operand-size case below follows the same pattern: map the memory
     * operand read-write, fetch EFLAGS, call the normal or locked assembly
     * implementation depending on the LOCK prefix, then commit both.
     */
    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory we do here. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17193
17194
17195/** Opcode 0xf6 /0. */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    /*
     * Group 3 /0: TEST Eb,Ib.  AND-compare without writing the destination;
     * only EFLAGS are updated, so the memory operand is mapped read-only
     * (IEM_ACCESS_DATA_R) and no locked variant exists.
     */
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* Effective address first (1 = immediate byte still to come), then
           the immediate, then the decoding is complete. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
17241
17242
17243/** Opcode 0xf7 /0. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    /*
     * Group 3 /0: TEST Ev,Iv.  Like test_Eb_Ib but for word/dword/qword
     * operands; the 64-bit form uses a sign-extended 32-bit immediate.
     * TEST only updates EFLAGS, so the destination is never written back
     * (memory is mapped IEM_ACCESS_DATA_R, and the 32-bit register form
     * deliberately skips the usual high-dword clearing).
     */
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access.  The second argument to IEM_MC_CALC_RM_EFF_ADDR is
           the number of immediate bytes that follow the ModR/M bytes. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17382
17383
17384/** Opcode 0xf6 /4, /5, /6 and /7. */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    /*
     * Shared worker for the byte-sized group 3 multiply/divide instructions
     * (mul/imul/div/idiv Eb).  The concrete operation comes in via pfnU8 and
     * operates on AX (implicit destination).  A non-zero status from the
     * assembly helper signals a divide error (#DE), raised via
     * IEM_MC_RAISE_DIVIDE_ERROR without advancing RIP.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            /* Non-zero status from the helper: raise #DE. */
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access: source operand fetched from memory, result in AX. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            /* Non-zero status from the helper: raise #DE. */
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
17435
17436
17437/** Opcode 0xf7 /4, /5, /6 and /7. */
17438FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
17439{
17440 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
17441
17442 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17443 {
17444 /* register access */
17445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17446 switch (pVCpu->iem.s.enmEffOpSize)
17447 {
17448 case IEMMODE_16BIT:
17449 {
17450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17451 IEM_MC_BEGIN(4, 1);
17452 IEM_MC_ARG(uint16_t *, pu16AX, 0);
17453 IEM_MC_ARG(uint16_t *, pu16DX, 1);
17454 IEM_MC_ARG(uint16_t, u16Value, 2);
17455 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17456 IEM_MC_LOCAL(int32_t, rc);
17457
17458 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17459 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
17460 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
17461 IEM_MC_REF_EFLAGS(pEFlags);
17462 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
17463 IEM_MC_IF_LOCAL_IS_Z(rc) {
17464 IEM_MC_ADVANCE_RIP();
17465 } IEM_MC_ELSE() {
17466 IEM_MC_RAISE_DIVIDE_ERROR();
17467 } IEM_MC_ENDIF();
17468
17469 IEM_MC_END();
17470 return VINF_SUCCESS;
17471 }
17472
17473 case IEMMODE_32BIT:
17474 {
17475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17476 IEM_MC_BEGIN(4, 1);
17477 IEM_MC_ARG(uint32_t *, pu32AX, 0);
17478 IEM_MC_ARG(uint32_t *, pu32DX, 1);
17479 IEM_MC_ARG(uint32_t, u32Value, 2);
17480 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17481 IEM_MC_LOCAL(int32_t, rc);
17482
17483 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17484 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
17485 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
17486 IEM_MC_REF_EFLAGS(pEFlags);
17487 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
17488 IEM_MC_IF_LOCAL_IS_Z(rc) {
17489 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
17490 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
17491 IEM_MC_ADVANCE_RIP();
17492 } IEM_MC_ELSE() {
17493 IEM_MC_RAISE_DIVIDE_ERROR();
17494 } IEM_MC_ENDIF();
17495
17496 IEM_MC_END();
17497 return VINF_SUCCESS;
17498 }
17499
17500 case IEMMODE_64BIT:
17501 {
17502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17503 IEM_MC_BEGIN(4, 1);
17504 IEM_MC_ARG(uint64_t *, pu64AX, 0);
17505 IEM_MC_ARG(uint64_t *, pu64DX, 1);
17506 IEM_MC_ARG(uint64_t, u64Value, 2);
17507 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17508 IEM_MC_LOCAL(int32_t, rc);
17509
17510 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17511 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
17512 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
17513 IEM_MC_REF_EFLAGS(pEFlags);
17514 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
17515 IEM_MC_IF_LOCAL_IS_Z(rc) {
17516 IEM_MC_ADVANCE_RIP();
17517 } IEM_MC_ELSE() {
17518 IEM_MC_RAISE_DIVIDE_ERROR();
17519 } IEM_MC_ENDIF();
17520
17521 IEM_MC_END();
17522 return VINF_SUCCESS;
17523 }
17524
17525 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17526 }
17527 }
17528 else
17529 {
17530 /* memory access. */
17531 switch (pVCpu->iem.s.enmEffOpSize)
17532 {
17533 case IEMMODE_16BIT:
17534 {
17535 IEM_MC_BEGIN(4, 2);
17536 IEM_MC_ARG(uint16_t *, pu16AX, 0);
17537 IEM_MC_ARG(uint16_t *, pu16DX, 1);
17538 IEM_MC_ARG(uint16_t, u16Value, 2);
17539 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17540 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17541 IEM_MC_LOCAL(int32_t, rc);
17542
17543 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17545 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17546 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
17547 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
17548 IEM_MC_REF_EFLAGS(pEFlags);
17549 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
17550 IEM_MC_IF_LOCAL_IS_Z(rc) {
17551 IEM_MC_ADVANCE_RIP();
17552 } IEM_MC_ELSE() {
17553 IEM_MC_RAISE_DIVIDE_ERROR();
17554 } IEM_MC_ENDIF();
17555
17556 IEM_MC_END();
17557 return VINF_SUCCESS;
17558 }
17559
17560 case IEMMODE_32BIT:
17561 {
17562 IEM_MC_BEGIN(4, 2);
17563 IEM_MC_ARG(uint32_t *, pu32AX, 0);
17564 IEM_MC_ARG(uint32_t *, pu32DX, 1);
17565 IEM_MC_ARG(uint32_t, u32Value, 2);
17566 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17567 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17568 IEM_MC_LOCAL(int32_t, rc);
17569
17570 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17572 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17573 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
17574 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
17575 IEM_MC_REF_EFLAGS(pEFlags);
17576 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
17577 IEM_MC_IF_LOCAL_IS_Z(rc) {
17578 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
17579 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
17580 IEM_MC_ADVANCE_RIP();
17581 } IEM_MC_ELSE() {
17582 IEM_MC_RAISE_DIVIDE_ERROR();
17583 } IEM_MC_ENDIF();
17584
17585 IEM_MC_END();
17586 return VINF_SUCCESS;
17587 }
17588
17589 case IEMMODE_64BIT:
17590 {
17591 IEM_MC_BEGIN(4, 2);
17592 IEM_MC_ARG(uint64_t *, pu64AX, 0);
17593 IEM_MC_ARG(uint64_t *, pu64DX, 1);
17594 IEM_MC_ARG(uint64_t, u64Value, 2);
17595 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17596 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17597 IEM_MC_LOCAL(int32_t, rc);
17598
17599 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17600 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17601 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17602 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
17603 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
17604 IEM_MC_REF_EFLAGS(pEFlags);
17605 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
17606 IEM_MC_IF_LOCAL_IS_Z(rc) {
17607 IEM_MC_ADVANCE_RIP();
17608 } IEM_MC_ELSE() {
17609 IEM_MC_RAISE_DIVIDE_ERROR();
17610 } IEM_MC_ENDIF();
17611
17612 IEM_MC_END();
17613 return VINF_SUCCESS;
17614 }
17615
17616 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17617 }
17618 }
17619}
17620
17621/** Opcode 0xf6. */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The ModR/M reg field selects the actual instruction within group 3. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Eb, "not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Eb, "neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            /* mul/imul leave SF, ZF, AF and PF architecturally undefined. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            /* div/idiv additionally leave OF and CF undefined. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17657
17658
17659/** Opcode 0xf7. */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The ModR/M reg field selects the actual instruction within group 3. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Ev, "not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Ev, "neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            /* mul/imul leave SF, ZF, AF and PF architecturally undefined. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            /* div/idiv additionally leave OF and CF undefined. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17695
17696
17697/** Opcode 0xf8. */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    /* Only CF is cleared; all other flags are left untouched. */
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17708
17709
17710/** Opcode 0xf9. */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    /* Only CF is set; all other flags are left untouched. */
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17721
17722
17723/** Opcode 0xfa. */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to a C implementation; presumably because IF changes involve
       privilege/IOPL checking -- see iemCImpl_cli for the details. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
17730
17731
/** Opcode 0xfb. */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to a C implementation; presumably because IF changes involve
       privilege/IOPL checking and interrupt shadowing -- see iemCImpl_sti. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
17738
17739
17740/** Opcode 0xfc. */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    /* Only DF is cleared; all other flags are left untouched. */
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17751
17752
17753/** Opcode 0xfd. */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    /* Only DF is set; all other flags are left untouched. */
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17764
17765
17766/** Opcode 0xfe. */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Group 4 only defines /0 (inc Eb) and /1 (dec Eb); /2../7 raise #UD. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Eb, "inc Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Eb, "dec Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
        default:
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
17783
17784
17785/**
17786 * Opcode 0xff /2.
17787 * @param bRm The RM byte.
17788 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    /* Near call defaults to 64-bit operand size in long mode. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17869
17870typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
17871
17872FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
17873{
17874 /* Registers? How?? */
17875 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
17876 { /* likely */ }
17877 else
17878 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
17879
17880 /* Far pointer loaded from memory. */
17881 switch (pVCpu->iem.s.enmEffOpSize)
17882 {
17883 case IEMMODE_16BIT:
17884 IEM_MC_BEGIN(3, 1);
17885 IEM_MC_ARG(uint16_t, u16Sel, 0);
17886 IEM_MC_ARG(uint16_t, offSeg, 1);
17887 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17888 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17889 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17891 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17892 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
17893 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17894 IEM_MC_END();
17895 return VINF_SUCCESS;
17896
17897 case IEMMODE_64BIT:
17898 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
17899 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
17900 * and call far qword [rsp] encodings. */
17901 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
17902 {
17903 IEM_MC_BEGIN(3, 1);
17904 IEM_MC_ARG(uint16_t, u16Sel, 0);
17905 IEM_MC_ARG(uint64_t, offSeg, 1);
17906 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17907 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17908 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17910 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17911 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
17912 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17913 IEM_MC_END();
17914 return VINF_SUCCESS;
17915 }
17916 /* AMD falls thru. */
17917
17918 case IEMMODE_32BIT:
17919 IEM_MC_BEGIN(3, 1);
17920 IEM_MC_ARG(uint16_t, u16Sel, 0);
17921 IEM_MC_ARG(uint32_t, offSeg, 1);
17922 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
17923 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17924 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17926 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17927 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
17928 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17929 IEM_MC_END();
17930 return VINF_SUCCESS;
17931
17932 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17933 }
17934}
17935
17936
17937/**
17938 * Opcode 0xff /3.
17939 * @param bRm The RM byte.
17940 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    /* Decoding and mode quirks are shared with jmpf Ep via the common worker. */
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
17946
17947
17948/**
17949 * Opcode 0xff /4.
17950 * @param bRm The RM byte.
17951 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    /* Near jump defaults to 64-bit operand size in long mode. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
18032
18033
18034/**
18035 * Opcode 0xff /5.
18036 * @param bRm The RM byte.
18037 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    /* Decoding and mode quirks are shared with callf Ep via the common worker. */
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
18043
18044
18045/**
18046 * Opcode 0xff /6.
18047 * @param bRm The RM byte.
18048 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory we do here. */
    /* Push defaults to 64-bit operand size in long mode. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t,  u16Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t,  u32Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t,  u64Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
18100
18101
18102/** Opcode 0xff. */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Group 5: /0 inc, /1 dec, /2 calln, /3 callf, /4 jmpn, /5 jmpf,
       /6 push, /7 undefined (#UD). */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Ev, "inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Ev, "dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    AssertFailedReturn(VERR_IEM_IPE_3);
}
18130
18131
18132
/**
 * The one-byte opcode decoder dispatch table, indexed by the opcode byte.
 *
 * Prefix bytes, group opcodes (0x80-0x83, 0xc0/0xc1, 0xd0-0xd3, 0xf6/0xf7,
 * 0xfe/0xff) and the 0x0f escape dispatch further on subsequent byte(s).
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa,             iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_Al_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp_vex2,   iemOp_lds_Gv_Mp_vex3,   iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int_3,            iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_eAX_DX,           iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int_1,            iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
18200
18201
18202/** @} */
18203
18204#ifdef _MSC_VER
18205# pragma warning(pop)
18206#endif
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette