VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 65644

Last change on this file since 65644 was 65621, checked in by vboxsync, 8 years ago

IEM: Typo fix (BT instruction family with 16-bit operand).

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 664.8 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 65621 2017-02-06 13:54:40Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24#ifdef _MSC_VER
25# pragma warning(push)
26# pragma warning(disable: 4702) /* Unreachable code like return in iemOp_Grp6_lldt. */
27#endif
28
29
/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination.
 *
 * Decodes the ModR/M byte itself.  For the register form any LOCK prefix is
 * rejected; for the memory form LOCK selects the locked implementation.
 * Instructions without a locked variant (CMP, TEST — pfnLockedU8 is NULL)
 * only read the destination, hence the read-only mapping below.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* reg field is the source, r/m field the destination. */
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,  pu8Dst,           0);
        IEM_MC_ARG(uint8_t,    u8Src,            1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        /* Instructions with a locked variant may carry a LOCK prefix; the
           others must reject it before we touch guest memory. */
        if (!pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        /* Commit memory first, then the flags (see note above). */
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
92
93
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Decodes the ModR/M byte itself and dispatches on the effective operand
 * size.  The locked implementation is used when a LOCK prefix is present on
 * the memory form; instructions without locked variants (CMP, TEST) map the
 * destination read-only.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit writes zero bits 63:32 of the destination register,
                   but TEST does not write its destination at all. */
                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* pfnLockedU8 serves as the "has locked variants at all" indicator for
           every operand size (it is NULL exactly for CMP and TEST). */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t,   u16Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t,   u32Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t,   u64Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
246
247
/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination.
 *
 * Decodes the ModR/M byte itself.  Since the destination is always a register
 * there is no locked form; the LOCK prefix is always rejected.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* r/m field is the source, reg field the destination (opposite of
           the rm_r8 worker). */
        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory (read-only source, no mapping needed).
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
300
301
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination.
 *
 * Decodes the ModR/M byte itself and dispatches on the effective operand
 * size.  Register destination means no locked form; the LOCK prefix is
 * always rejected.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit register writes zero bits 63:32 of the destination. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory (read-only source operand).
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit register writes zero bits 63:32 of the destination. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
432
433
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * Fetches the imm8 itself; no ModR/M byte is involved.  LOCK is always
 * invalid here.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1);
    IEM_MC_ARG(uint32_t *,      pEFlags,            2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
458
459
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * Fetches the immediate itself according to the effective operand size; in
 * 64-bit mode the immediate is a dword sign-extended to 64 bits (standard Iz
 * encoding).  LOCK is always invalid here.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            /* 32-bit writes zero bits 63:32 of RAX, except TEST which does
               not write its destination. */
            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Iz is a sign-extended dword in 64-bit mode. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
532
533
/** Opcodes 0xf1, 0xd6 - always invalid, raise \#UD. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC(Invalid, "Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
540
541
/** Invalid with RM byte - the ModR/M byte is already consumed by the caller;
 *  raises \#UD. */
FNIEMOPRM_DEF(iemOp_InvalidWithRM)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(InvalidWithRm, "InvalidWithRM");
    return IEMOP_RAISE_INVALID_OPCODE();
}
549
550
/** Invalid opcode where intel requires Mod R/M sequence.
 *
 * Intel CPUs decode the full ModR/M (and any memory operand addressing)
 * before raising \#UD, so we mimic that here to keep exception priorities
 * right; other vendors raise \#UD immediately. */
FNIEMOP_DEF(iemOp_InvalidNeedRM)
{
    IEMOP_MNEMONIC(InvalidNeedRM, "InvalidNeedRM");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR      GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
571
572
573/** Invalid opcode where intel requires Mod R/M sequence and 8-byte
574 * immediate. */
575FNIEMOP_DEF(iemOp_InvalidNeedRMImm8)
576{
577 IEMOP_MNEMONIC(InvalidNeedRMImm8, "InvalidNeedRMImm8");
578 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
579 {
580 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
581#ifndef TST_IEM_CHECK_MC
582 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
583 {
584 RTGCPTR GCPtrEff;
585 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
586 if (rcStrict != VINF_SUCCESS)
587 return rcStrict;
588 }
589#endif
590 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); RT_NOREF(bImm);
591 IEMOP_HLP_DONE_DECODING();
592 }
593 return IEMOP_RAISE_INVALID_OPCODE();
594}
595
596
/** Invalid opcode where intel requires a 3rd escape byte and a Mod R/M
 * sequence.
 *
 * Consumes the escape byte and ModR/M (plus addressing bytes) on Intel so
 * the decode state matches real hardware before \#UD is raised. */
FNIEMOP_DEF(iemOp_InvalidNeed3ByteEscRM)
{
    IEMOP_MNEMONIC(InvalidNeed3ByteEscRM, "InvalidNeed3ByteEscRM");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t b3rd; IEM_OPCODE_GET_NEXT_U8(&b3rd); RT_NOREF(b3rd);
        uint8_t bRm;  IEM_OPCODE_GET_NEXT_U8(&bRm);  RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR      GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
619
620
/** Invalid opcode where intel requires a 3rd escape byte, Mod R/M sequence,
 * and an 8-bit immediate.
 *
 * Note the cbImm = 1 passed to the effective address calculation: the imm8
 * byte follows the addressing bytes and must be counted for RIP-relative
 * operands. */
FNIEMOP_DEF(iemOp_InvalidNeed3ByteEscRMImm8)
{
    IEMOP_MNEMONIC(InvalidNeed3ByteEscRMImm8, "InvalidNeed3ByteEscRMImm8");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t b3rd; IEM_OPCODE_GET_NEXT_U8(&b3rd); RT_NOREF(b3rd);
        uint8_t bRm;  IEM_OPCODE_GET_NEXT_U8(&bRm);  RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR      GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 1, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); RT_NOREF(bImm);
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
644
645
646
647/** @name ..... opcodes.
648 *
649 * @{
650 */
651
652/** @} */
653
654
655/** @name Two byte opcodes (first byte 0x0f).
656 *
657 * @{
658 */
659
/** Opcode 0x0f 0x00 /0 - SLDT: store the LDT selector.
 *
 * Register destination stores 16/32/64 bits per operand size; memory
 * destination always stores 16 bits.  Not valid in real or V8086 mode. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: always a 16-bit store regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
716
717
/** Opcode 0x0f 0x00 /1 - STR: store the task register selector.
 *
 * Same shape as SLDT: register destination uses the effective operand size,
 * memory destination always stores 16 bits.  Not valid in real/V86 mode. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: always a 16-bit store regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
774
775
/** Opcode 0x0f 0x00 /2 - LLDT: load the LDT register from a selector.
 *
 * The heavy lifting (privilege and descriptor checks) is done by the C
 * implementation iemCImpl_lldt.  Not valid in real/V86 mode. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
806
807
/** Opcode 0x0f 0x00 /3 - LTR: load the task register from a selector.
 *
 * Descriptor validation and the busy-bit update are done by iemCImpl_ltr.
 * Not valid in real/V86 mode. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
838
839
840/** Opcode 0x0f 0x00 /3. */
841FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
842{
843 IEMOP_HLP_MIN_286();
844 IEMOP_HLP_NO_REAL_OR_V86_MODE();
845
846 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
847 {
848 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
849 IEM_MC_BEGIN(2, 0);
850 IEM_MC_ARG(uint16_t, u16Sel, 0);
851 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
852 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
853 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
854 IEM_MC_END();
855 }
856 else
857 {
858 IEM_MC_BEGIN(2, 1);
859 IEM_MC_ARG(uint16_t, u16Sel, 0);
860 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
861 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
863 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
864 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
865 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
866 IEM_MC_END();
867 }
868 return VINF_SUCCESS;
869}
870
871
/** Opcode 0x0f 0x00 /4 - VERR: verify a segment for reading (sets ZF). */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}
879
880
/** Opcode 0x0f 0x00 /5 - VERW: verify a segment for writing (sets ZF). */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}
888
889
/**
 * Group 6 jump table, indexed by the ModR/M reg field (/0../7).
 * Entries /6 and /7 are undefined and decode as \#UD.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,        /* /0 */
    iemOp_Grp6_str,         /* /1 */
    iemOp_Grp6_lldt,        /* /2 */
    iemOp_Grp6_ltr,         /* /3 */
    iemOp_Grp6_verr,        /* /4 */
    iemOp_Grp6_verw,        /* /5 */
    iemOp_InvalidWithRM,    /* /6 */
    iemOp_InvalidWithRM     /* /7 */
};
904
/** Opcode 0x0f 0x00 - dispatches Group 6 via the reg field of ModR/M. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
911
912
/** Opcode 0x0f 0x01 /0 - SGDT: store the GDT base and limit to memory. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
929
930
/** Opcode 0x0f 0x01 /0 - VMCALL (VT-x). Stub: not implemented, raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
937
938
/** Opcode 0x0f 0x01 /0 - VMLAUNCH (VT-x). Stub: not implemented, raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
945
946
/** Opcode 0x0f 0x01 /0 - VMRESUME (VT-x). Stub: not implemented, raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
953
954
/** Opcode 0x0f 0x01 /0 - VMXOFF (VT-x). Stub: not implemented, raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
961
962
/** Opcode 0x0f 0x01 /1 - SIDT: store the IDT base and limit to memory. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
979
980
/** Opcode 0x0f 0x01 /1 - MONITOR: deferred to the C implementation with the
 *  effective segment of the address in RAX/EAX/AX. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}
988
989
/** Opcode 0x0f 0x01 /1 - MWAIT: deferred to the C implementation. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
997
998
/** Opcode 0x0f 0x01 /2 - LGDT: load the GDT base and limit from memory.
 *  The operand size is forwarded so the C implementation can apply the
 *  24/32/64-bit base truncation rules. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1015
1016
/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    /* Only valid when the guest CPU profile advertises XSAVE/XRSTOR. */
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
1028
1029
/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    /* Only valid when the guest CPU profile advertises XSAVE/XRSTOR. */
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
1041
1042
/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    /* In 64-bit mode the operand size is forced to 64-bit regardless of
       prefixes; otherwise the current effective operand size is used. */
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    /* Decode the effective address before declaring decoding finished. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1061
1062
/* AMD SVM instructions (group 7, /3 with mod=3, opcode bytes 0xd8..0xdf).
   All are currently undefined-opcode stubs that raise \#UD. */

/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
1086
1087/** Opcode 0x0f 0x01 /4. */
1088FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
1089{
1090 IEMOP_MNEMONIC(smsw, "smsw");
1091 IEMOP_HLP_MIN_286();
1092 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1093 {
1094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1095 switch (pVCpu->iem.s.enmEffOpSize)
1096 {
1097 case IEMMODE_16BIT:
1098 IEM_MC_BEGIN(0, 1);
1099 IEM_MC_LOCAL(uint16_t, u16Tmp);
1100 IEM_MC_FETCH_CR0_U16(u16Tmp);
1101 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
1102 { /* likely */ }
1103 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
1104 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1105 else
1106 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1107 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
1108 IEM_MC_ADVANCE_RIP();
1109 IEM_MC_END();
1110 return VINF_SUCCESS;
1111
1112 case IEMMODE_32BIT:
1113 IEM_MC_BEGIN(0, 1);
1114 IEM_MC_LOCAL(uint32_t, u32Tmp);
1115 IEM_MC_FETCH_CR0_U32(u32Tmp);
1116 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
1117 IEM_MC_ADVANCE_RIP();
1118 IEM_MC_END();
1119 return VINF_SUCCESS;
1120
1121 case IEMMODE_64BIT:
1122 IEM_MC_BEGIN(0, 1);
1123 IEM_MC_LOCAL(uint64_t, u64Tmp);
1124 IEM_MC_FETCH_CR0_U64(u64Tmp);
1125 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
1126 IEM_MC_ADVANCE_RIP();
1127 IEM_MC_END();
1128 return VINF_SUCCESS;
1129
1130 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1131 }
1132 }
1133 else
1134 {
1135 /* Ignore operand size here, memory refs are always 16-bit. */
1136 IEM_MC_BEGIN(0, 2);
1137 IEM_MC_LOCAL(uint16_t, u16Tmp);
1138 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1139 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1141 IEM_MC_FETCH_CR0_U16(u16Tmp);
1142 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
1143 { /* likely */ }
1144 else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
1145 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1146 else
1147 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1148 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
1149 IEM_MC_ADVANCE_RIP();
1150 IEM_MC_END();
1151 return VINF_SUCCESS;
1152 }
1153}
1154
1155
/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register operand: fetch the 16-bit GPR and defer to the C impl. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        /* Memory operand: always a 16-bit load regardless of operand size. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1185
1186
/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    /* Only the effective address is needed; no memory access is performed
       here — the TLB invalidation happens in iemCImpl_invlpg. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1200
1201
/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    /* SWAPGS only exists in 64-bit mode; \#UD elsewhere. */
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
1210
1211
/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    /* RDTSCP not implemented yet; note this returns an error status rather
       than raising \#UD like the other stubs in this group. */
    NOREF(pVCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1219
1220
/** Opcode 0x0f 0x01.
 *
 * Group 7 dispatcher: selects the instruction from the ModR/M reg field, and
 * for several /reg values further from the mod and rm fields (mod=3 encodes
 * the VMX, MONITOR/MWAIT, XGETBV/XSETBV, SVM and SWAPGS/RDTSCP variants).
 */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: /* mem: SGDT; reg: VMX instructions 0xc1..0xc4. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1: /* mem: SIDT; reg: MONITOR (0xc8) / MWAIT (0xc9). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2: /* mem: LGDT; reg: XGETBV (0xd0) / XSETBV (0xd1). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3: /* mem: LIDT; reg: AMD SVM instructions 0xd8..0xdf. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4: /* SMSW - both register and memory forms. */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5: /* /5 is undefined in group 7. */
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6: /* LMSW - both register and memory forms. */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* mem: INVLPG; reg: SWAPGS (0xf8) / RDTSCP (0xf9). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1297
/** Opcode 0x0f 0x00 /3.
 *
 * Common worker for LAR and LSL (Gv,Ew); @a fIsLar selects between the two.
 * Both instructions fetch a 16-bit selector operand; the 32-bit and 64-bit
 * destination forms share the 64-bit code path.  Invalid in real and V8086
 * mode.
 */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. */
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory source: the effective address is decoded before the
           done-decoding helper is invoked. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1391
1392
1393
/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    /* Shares the worker with LSL; true selects LAR semantics. */
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}
1400
1401
/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    /* Shares the worker with LAR; false selects LSL semantics. */
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}
1408
1409
/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Everything is done in the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}
1417
1418
/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Clearing CR0.TS is handled entirely by the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1426
1427
/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Everything is done in the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1435
1436
/** Opcode 0x0f 0x08.  INVD - not implemented yet (stub). */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();
1440
1441
/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    /* Privilege is checked (\#GP(0) if CPL != 0), but the cache write-back
       and invalidation itself is a no-op here. */
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}
1454
1455
/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    /* UD2 is the architecturally defined invalid opcode; always raises \#UD. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1462
/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms are invalid for the prefetch group. */
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    /* Decode the memory operand but do nothing with it (prefetch hint). */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1503
1504
/** Opcode 0x0f 0x0e.  FEMMS - not implemented yet (stub). */
FNIEMOP_STUB(iemOp_femms);


/* 3DNow! instructions (0x0f 0x0f /imm8) - all currently unimplemented stubs,
   dispatched from iemOp_3Dnow below by the trailing immediate byte. */

/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1580
1581
/** Opcode 0x0f 0x0f.
 *
 * 3DNow! dispatcher: the actual operation is encoded in a trailing immediate
 * byte that follows the (not yet parsed) ModR/M operand bytes.  Raises \#UD
 * when the guest CPU profile lacks 3DNow!.
 */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1623
1624
/* 0x0f 0x10 family (load forms) - unimplemented stubs. */

/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x10 - vmovss Vx, Hx, Wss */
FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);
/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);
1633
1634
/** Opcode 0x0f 0x11 - vmovups Wps, Vps */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Destination is the rm-encoded register, source the reg-encoded one. */
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        /* Unaligned store - no alignment check, unlike movaps below. */
        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1676
1677
/** Opcode 0x66 0x0f 0x11 - vmovupd Wpd,Vpd (unimplemented stub) */
FNIEMOP_STUB(iemOp_vmovupd_Wpd_Vpd);

/** Opcode 0xf3 0x0f 0x11 - vmovss Wss, Hx, Vss (unimplemented stub) */
FNIEMOP_STUB(iemOp_vmovss_Wss_Hx_Vss);
1683
/** Opcode 0xf2 0x0f 0x11 - vmovsd Wsd, Hx, Vsd */
FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
{
    IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Only the low quadword is moved; the destination's high half is
           left untouched in this register form. */
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1728
1729
/* 0x0f 0x12 family and 0x0f 0x13 (movlps) - unimplemented stubs. */

/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps); //NEXT

/** Opcode 0x66 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovlpd_Vq_Hq_Mq); //NEXT

/** Opcode 0xf3 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovsldup_Vx_Wx); //NEXT

/** Opcode 0xf2 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx); //NEXT

/** Opcode 0x0f 0x13 - vmovlps Mq, Vq */
FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq);
1744
/** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq */
FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
{
    IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
#if 0
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
#else
        /* MOVLPD only has a memory form; register encoding raises \#UD
           (the disabled code above kept for reference). */
        return IEMOP_RAISE_INVALID_OPCODE();
#endif
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1791
/* Opcode 0xf3 0x0f 0x13 - invalid */
/* Opcode 0xf2 0x0f 0x13 - invalid */

/* 0x0f 0x14..0x17 (unpack and high-quadword moves) - stubs and invalid slots. */

/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx*/
FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x14 - invalid */
/* Opcode 0xf2 0x0f 0x14 - invalid */
/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */
/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq vmovlhps Vdq, Hq, Uq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
/* Opcode 0xf2 0x0f 0x16 - invalid */
/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
/* Opcode 0xf3 0x0f 0x17 - invalid */
/* Opcode 0xf2 0x0f 0x17 - invalid */
1820
1821
/** Opcode 0x0f 0x18.
 *
 * Group 16: the SSE prefetch hint family.  Memory forms are decoded but act
 * as NOPs; register forms are invalid.
 */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}
1854
1855
/** Opcode 0x0f 0x19..0x1f.
 *
 * Multi-byte NOP with a ModR/M operand; the memory form still decodes the
 * effective address (for fault semantics of the decode itself) but accesses
 * nothing.
 */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1881
1882
/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    /* Force the operand size: 64-bit in long mode, 32-bit otherwise. */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2-CR4 and CR8 are valid sources. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}
1914
1915
/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REX.R on the debug register operand is invalid (no DR8..DR15). */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}
1929
1930
/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    /* Force the operand size: 64-bit in long mode, 32-bit otherwise. */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2-CR4 and CR8 are valid destinations. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}
1962
1963
/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REX.R on the debug register operand is invalid (no DR8..DR15). */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}
1977
1978
/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* Test-register moves are treated as invalid on later CPUs.
       The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1987
1988
/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* Test-register moves are treated as invalid on later CPUs.
       The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1997
1998
/** Opcode 0x0f 0x28 - vmovaps Vps, Wps */
FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
{
    IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* Aligned variant: the fetch enforces SSE alignment checking. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2040
/** Opcode 0x66 0x0f 0x28 - vmovapd Vpd, Wpd */
FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
{
    /* Same load as movaps (0x0f 0x28 without prefix) except it checks the
       SSE2-related exceptions rather than the SSE ones. */
    IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Destination index = reg field + REX.R; source = r/m + REX.B. */
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t,  uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* 16-byte aligned fetch; see the _ALIGN_SSE microcode op. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2082
2083/* Opcode 0xf3 0x0f 0x28 - invalid */
2084/* Opcode 0xf2 0x0f 0x28 - invalid */
2085
/** Opcode 0x0f 0x29. */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
{
    /* Store direction of movaps/movapd: XMM register to XMM register or to a
       16-byte aligned memory location.  The 0x66 prefix selects movapd, which
       only differs in which exception check is used. */
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
        IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
    else
        IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Note the operand order: r/m (+REX.B) is the destination here. */
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t,  uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc); /* effective address of the store target */

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2136
2137
2138/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2139FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2140/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2141FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2142/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
2143FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
2144/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2145FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT
2146
2147
/** Opcode 0x0f 0x2b. */
FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
{
    /* Non-temporal store of an XMM register to aligned memory.  The hint has
       no representation in these microcode ops; it is emulated as a plain
       aligned 128-bit store. */
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
        IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
    else
        IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mdq,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t,  uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
2184
2185
2186/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2187FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2188/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2189FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2190/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
2191FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
2192/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
2193FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
2194
2195/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2196FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2197/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2198FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2199/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
2200FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
2201/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
2202FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
2203
2204/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
2205FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
2206/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
2207FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
2208/* Opcode 0xf3 0x0f 0x2e - invalid */
2209/* Opcode 0xf2 0x0f 0x2e - invalid */
2210
2211/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
2212FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
2213/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
2214FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
2215/* Opcode 0xf3 0x0f 0x2f - invalid */
2216/* Opcode 0xf2 0x0f 0x2f - invalid */
2217
/** Opcode 0x0f 0x30. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* All the real work happens in the deferred C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}
2225
2226
/** Opcode 0x0f 0x31. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* All the real work happens in the deferred C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
2234
2235
/** Opcode 0x0f 0x32. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* All the real work happens in the deferred C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
2243
2244
/** Opcode 0x0f 0x33. */
2246FNIEMOP_STUB(iemOp_rdpmc);
2247/** Opcode 0x0f 0x34. */
2248FNIEMOP_STUB(iemOp_sysenter);
2249/** Opcode 0x0f 0x35. */
2250FNIEMOP_STUB(iemOp_sysexit);
2251/** Opcode 0x0f 0x37. */
2252FNIEMOP_STUB(iemOp_getsec);
2253/** Opcode 0x0f 0x38. */
2254FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
2255/** Opcode 0x0f 0x3a. */
2256FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
2257
2258
/**
 * Implements a conditional move.
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * @param a_Cnd The conditional "microcode" operation (an IEM_MC_IF_* macro
 *              selecting the EFLAGS condition of the specific CMOVcc).
 *
 * @note For the 32-bit operand size the high half of the destination is
 *       cleared even when the condition is false (see the IEM_MC_ELSE
 *       branches).  For the memory forms the operand is always fetched,
 *       condition true or not.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
    { \
        /* Register source: conditionally copy r/m reg into the reg operand. */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    /* Condition false: still zero the high dword of the destination. */ \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* Memory source: the fetch happens unconditionally, only the store is guarded. */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    /* Condition false: still zero the high dword of the destination. */ \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
2359
2360
2361
/** Opcode 0x0f 0x40 - cmovo: move if OF is set. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41 - cmovno: move if OF is clear. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42 - cmovc: move if CF is set. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43 - cmovnc: move if CF is clear. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x44 - cmove: move if ZF is set. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45 - cmovne: move if ZF is clear. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46 - cmovbe: move if CF or ZF is set. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47 - cmovnbe: move if both CF and ZF are clear. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x48 - cmovs: move if SF is set. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49 - cmovns: move if SF is clear. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a - cmovp: move if PF is set. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b - cmovnp: move if PF is clear. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4c - cmovl: move if SF != OF. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4d - cmovnl: move if SF == OF. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4e - cmovle: move if ZF is set or SF != OF. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4f - cmovnle: move if ZF is clear and SF == OF. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}
2488
2489#undef CMOV_X
2490
2491/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
2492FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
2493/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
2494FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
2495/* Opcode 0xf3 0x0f 0x50 - invalid */
2496/* Opcode 0xf2 0x0f 0x50 - invalid */
2497
2498/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
2499FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2500/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
2501FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2502/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
2503FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2504/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2505FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2506
2507/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
2508FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2509/* Opcode 0x66 0x0f 0x52 - invalid */
2510/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
2511FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2512/* Opcode 0xf2 0x0f 0x52 - invalid */
2513
2514/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
2515FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2516/* Opcode 0x66 0x0f 0x53 - invalid */
2517/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
2518FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2519/* Opcode 0xf2 0x0f 0x53 - invalid */
2520
2521/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
2522FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
2523/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
2524FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
2525/* Opcode 0xf3 0x0f 0x54 - invalid */
2526/* Opcode 0xf2 0x0f 0x54 - invalid */
2527
2528/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
2529FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
2530/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
2531FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
2532/* Opcode 0xf3 0x0f 0x55 - invalid */
2533/* Opcode 0xf2 0x0f 0x55 - invalid */
2534
2535/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
2536FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
2537/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
2538FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
2539/* Opcode 0xf3 0x0f 0x56 - invalid */
2540/* Opcode 0xf2 0x0f 0x56 - invalid */
2541
2542/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
2543FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
2544/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
2545FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
2546/* Opcode 0xf3 0x0f 0x57 - invalid */
2547/* Opcode 0xf2 0x0f 0x57 - invalid */
2548
2549/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
2550FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2551/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
2552FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2553/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
2554FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2555/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
2556FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2557
2558/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
2559FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2560/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
2561FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2562/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
2563FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2564/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
2565FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2566
2567/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
2568FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2569/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
2570FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2571/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
2572FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2573/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2574FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2575
2576/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
2577FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2578/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
2579FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2580/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
2581FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2582/* Opcode 0xf2 0x0f 0x5b - invalid */
2583
2584/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
2585FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2586/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
2587FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2588/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
2589FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2590/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
2591FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2592
2593/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
2594FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2595/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
2596FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2597/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
2598FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2599/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
2600FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2601
2602/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
2603FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2604/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
2605FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2606/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
2607FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2608/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
2609FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2610
2611/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
2612FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2613/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
2614FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2615/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
2616FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2617/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
2618FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2619
2620/**
2621 * Common worker for MMX instructions on the forms:
2622 * pxxxx mm1, mm2/mem32
2623 *
2624 * The 2nd operand is the first half of a register, which in the memory case
2625 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
2626 * memory accessed for MMX.
2627 *
2628 * Exceptions type 4.
2629 */
2630FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2631{
2632 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2633 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2634 {
2635 /*
2636 * Register, register.
2637 */
2638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2639 IEM_MC_BEGIN(2, 0);
2640 IEM_MC_ARG(uint128_t *, pDst, 0);
2641 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2642 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2643 IEM_MC_PREPARE_SSE_USAGE();
2644 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2645 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2646 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2647 IEM_MC_ADVANCE_RIP();
2648 IEM_MC_END();
2649 }
2650 else
2651 {
2652 /*
2653 * Register, memory.
2654 */
2655 IEM_MC_BEGIN(2, 2);
2656 IEM_MC_ARG(uint128_t *, pDst, 0);
2657 IEM_MC_LOCAL(uint64_t, uSrc);
2658 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2659 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2660
2661 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2663 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2664 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2665
2666 IEM_MC_PREPARE_SSE_USAGE();
2667 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2668 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2669
2670 IEM_MC_ADVANCE_RIP();
2671 IEM_MC_END();
2672 }
2673 return VINF_SUCCESS;
2674}
2675
2676
2677/**
2678 * Common worker for SSE2 instructions on the forms:
2679 * pxxxx xmm1, xmm2/mem128
2680 *
2681 * The 2nd operand is the first half of a register, which in the memory case
2682 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
2683 * memory accessed for MMX.
2684 *
2685 * Exceptions type 4.
2686 */
2687FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2688{
2689 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2690 if (!pImpl->pfnU64)
2691 return IEMOP_RAISE_INVALID_OPCODE();
2692 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2693 {
2694 /*
2695 * Register, register.
2696 */
2697 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2698 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2700 IEM_MC_BEGIN(2, 0);
2701 IEM_MC_ARG(uint64_t *, pDst, 0);
2702 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2703 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2704 IEM_MC_PREPARE_FPU_USAGE();
2705 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2706 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2707 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2708 IEM_MC_ADVANCE_RIP();
2709 IEM_MC_END();
2710 }
2711 else
2712 {
2713 /*
2714 * Register, memory.
2715 */
2716 IEM_MC_BEGIN(2, 2);
2717 IEM_MC_ARG(uint64_t *, pDst, 0);
2718 IEM_MC_LOCAL(uint32_t, uSrc);
2719 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2720 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2721
2722 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2724 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2725 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2726
2727 IEM_MC_PREPARE_FPU_USAGE();
2728 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2729 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2730
2731 IEM_MC_ADVANCE_RIP();
2732 IEM_MC_END();
2733 }
2734 return VINF_SUCCESS;
2735}
2736
2737
/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
{
    IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
    /* MMX form: dispatch to the common low-to-full unpack worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}
2744
2745/** Opcode 0x66 0x0f 0x60 - vpunpcklbw Vx, Hx, W */
2746FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2747{
2748 IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
2749 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2750}
2751
2752/* Opcode 0xf3 0x0f 0x60 - invalid */
2753
2754
/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
{
    IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    /* MMX form: dispatch to the common low-to-full unpack worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}
2761
/** Opcode 0x66 0x0f 0x61 - vpunpcklwd Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
    /* SSE form (0x66 prefix): dispatch to the SSE low-to-full unpack worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}
2768
2769/* Opcode 0xf3 0x0f 0x61 - invalid */
2770
2771
/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
    /* MMX form: dispatch to the common low-to-full unpack worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
}
2778
/** Opcode 0x66 0x0f 0x62 - vpunpckldq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
    /* SSE form (0x66 prefix): dispatch to the SSE low-to-full unpack worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}
2785
2786/* Opcode 0xf3 0x0f 0x62 - invalid */
2787
2788
2789
2790/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2791FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2792/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
2793FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
2794/* Opcode 0xf3 0x0f 0x63 - invalid */
2795
2796/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2797FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2798/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
2799FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
2800/* Opcode 0xf3 0x0f 0x64 - invalid */
2801
2802/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2803FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2804/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
2805FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
2806/* Opcode 0xf3 0x0f 0x65 - invalid */
2807
2808/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2809FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2810/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
2811FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
2812/* Opcode 0xf3 0x0f 0x66 - invalid */
2813
2814/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2815FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
2816/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, W */
2817FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
2818/* Opcode 0xf3 0x0f 0x67 - invalid */
2819
2820
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *      pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
 * where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Dispatch on the mandatory prefix: 0x66 selects the SSE form, no prefix
       the MMX form; F2/F3 (or prefix combinations) raise #UD. */
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *,          pDst, 0);
                IEM_MC_ARG(uint128_t const *,    pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *,                 pDst,       0);
                IEM_MC_LOCAL(uint128_t,                 uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */

                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            /* Not every instruction using this worker has an MMX form. */
            if (!pImpl->pfnU64)
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *,          pDst, 0);
                IEM_MC_ARG(uint64_t const *,    pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *,                  pDst,       0);
                IEM_MC_LOCAL(uint64_t,                  uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2931
2932
/** Opcode 0x0f 0x68. */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC(punpckhbw, "punpckhbw");
    /* Worker dispatches on the mandatory prefix (MMX vs SSE form). */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}
2939
2940
/** Opcode 0x0f 0x69. */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC(punpckhwd, "punpckhwd");
    /* Worker dispatches on the mandatory prefix (MMX vs SSE form). */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
2947
2948
/** Opcode 0x0f 0x6a - punpckhdq Pq,Qd (MMX); with 0x66 prefix: punpckhdq Vdq,Wdq (SSE2).
 *  Interleaves the high-order doublewords; same common worker as punpckhbw. */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC(punpckhdq, "punpckhdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
2955
2956/** Opcode 0x0f 0x6b. */
2957FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);
2958
2959
2960/* Opcode 0x0f 0x6c - invalid */
2961
/** Opcode 0x66 0x0f 0x6c - vpunpcklqdq Vx, Hx, Wx.
 *  Interleaves the low quadwords via the SSE low-half-to-full worker.
 *  NOTE(review): despite the 'v' name this dispatches to the plain SSE worker;
 *  presumably VEX decoding routes here too -- verify against the opcode map. */
FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}
2968
2969/* Opcode 0xf3 0x0f 0x6c - invalid */
2970/* Opcode 0xf2 0x0f 0x6c - invalid */
2971
2972
/** Opcode 0x0f 0x6d - punpckhqdq Vdq,Wdq (SSE2, 0x66 prefix only).
 *  Interleaves the high quadwords.  NOTE(review): there is no MMX form of
 *  0x0f 0x6d; presumably the common worker / impl table rejects the
 *  unprefixed case -- verify. */
FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC(punpckhqdq, "punpckhqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}
2979
2980
/** Opcode 0x0f 0x6e - movd/movq Pd/Pq,Ed/Eq (MMX); with 0x66 prefix:
 *  movd/movq Vd/Vq,Ed/Eq (SSE2).
 *
 * Loads a 32-bit (or, with REX.W, 64-bit) general register or memory operand
 * into an MMX or XMM register; the XMM forms zero-extend to the full 128 bits.
 * The mandatory-prefix value selects the decode: 0x66 = SSE2, none = MMX,
 * REPZ/REPNZ = invalid opcode.
 */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            /* NOTE(review): the mnemonic text uses Wq/Wd, but the destination here
               is the XMM register operand (Vq/Vd); logging text only -- confirm. */
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                IEMOP_MNEMONIC(movdq_Wq_Eq, "movq Wq,Eq");
            else
                IEMOP_MNEMONIC(movdq_Wd_Ed, "movd Wd,Ed");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* XMM, greg*/
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    /* 64-bit load, zero-extended into the 128-bit register. */
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
                }
                else
                {
                    /* 32-bit load, zero-extended into the 128-bit register. */
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* XMM, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
                /* NOTE(review): cbImm=1 here although 0x6e takes no immediate byte;
                   that would skew RIP-relative addressing in 64-bit mode -- confirm
                   (compare 0x6f which passes 0). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
            else
                IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* MMX, greg */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                else
                    IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* MMX, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                /* NOTE(review): cbImm=1 without an immediate byte -- see the SSE
                   path note above; confirm. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            /* REPZ/REPNZ prefixed forms of 0x0f 0x6e are undefined. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3091
3092
/** Opcode 0x0f 0x6f - movq Pq,Qq (MMX); with 0x66 prefix: movdqa Vdq,Wdq;
 *  with 0xf3 prefix: movdqu Vdq,Wdq (both SSE2).
 *
 * Full register loads from register or memory.  The 0x66 form enforces 16-byte
 * alignment of the memory operand, the 0xf3 form does not; both share the code
 * below via the fAligned flag.
 */
FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
{
    bool fAligned = false;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - the aligned and unaligned forms differ only in fAligned. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
            else
                IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 0);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                                      (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                if (fAligned)
                    IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                else
                    IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            /* REPNZ prefixed 0x0f 0x6f is undefined. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3189
3190
/** Opcode 0x0f 0x70. The immediate here is evil!
 *
 * pshufw Pq,Qq,Ib (MMX ext.); with 0x66: pshufd Vdq,Wdq,Ib; with 0xf2:
 * pshuflw Vdq,Wdq,Ib; with 0xf3: pshufhw Vdq,Wdq,Ib (all SSE2).
 *
 * "Evil" because the imm8 follows the ModRM + displacement, so in the memory
 * forms it must be fetched after calculating the effective address.
 * NOTE(review): the function name says 'pshuflq' where the 0xf2 form is
 * actually pshuflw -- name typo only, cannot be changed here (opcode table
 * references it).
 */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
        case IEM_OP_PRF_REPNZ: /* SSE */
        case IEM_OP_PRF_REPZ: /* SSE */
        {
            /* Pick the assembly worker matching the mandatory prefix. */
            PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
            {
                case IEM_OP_PRF_SIZE_OP:
                    IEMOP_MNEMONIC(pshufd_Vdq_Wdq, "pshufd Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufd;
                    break;
                case IEM_OP_PRF_REPNZ:
                    IEMOP_MNEMONIC(pshuflw_Vdq_Wdq, "pshuflw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshuflw;
                    break;
                case IEM_OP_PRF_REPZ:
                    IEMOP_MNEMONIC(pshufhw_Vdq_Wdq, "pshufhw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufhw;
                    break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                /* NOTE(review): cbImm=0, yet an imm8 (bEvil) follows the ModRM;
                   RIP-relative addressing would be off by one byte -- confirm. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case 0: /* MMX Extension */
            IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                /* NOTE(review): cbImm=0 despite the trailing imm8 -- see above. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

                IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3319
3320
3321/** Opcode 0x0f 0x71 11/2. */
3322FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3323
3324/** Opcode 0x66 0x0f 0x71 11/2. */
3325FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);
3326
3327/** Opcode 0x0f 0x71 11/4. */
3328FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3329
3330/** Opcode 0x66 0x0f 0x71 11/4. */
3331FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);
3332
3333/** Opcode 0x0f 0x71 11/6. */
3334FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3335
3336/** Opcode 0x66 0x0f 0x71 11/6. */
3337FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
3338
3339
3340/** Opcode 0x0f 0x71. */
3341FNIEMOP_DEF(iemOp_Grp12)
3342{
3343 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3344 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3345 return IEMOP_RAISE_INVALID_OPCODE();
3346 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3347 {
3348 case 0: case 1: case 3: case 5: case 7:
3349 return IEMOP_RAISE_INVALID_OPCODE();
3350 case 2:
3351 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3352 {
3353 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
3354 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
3355 default: return IEMOP_RAISE_INVALID_OPCODE();
3356 }
3357 case 4:
3358 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3359 {
3360 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
3361 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
3362 default: return IEMOP_RAISE_INVALID_OPCODE();
3363 }
3364 case 6:
3365 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3366 {
3367 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
3368 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
3369 default: return IEMOP_RAISE_INVALID_OPCODE();
3370 }
3371 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3372 }
3373}
3374
3375
3376/** Opcode 0x0f 0x72 11/2. */
3377FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3378
3379/** Opcode 0x66 0x0f 0x72 11/2. */
3380FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);
3381
3382/** Opcode 0x0f 0x72 11/4. */
3383FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3384
3385/** Opcode 0x66 0x0f 0x72 11/4. */
3386FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);
3387
3388/** Opcode 0x0f 0x72 11/6. */
3389FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3390
3391/** Opcode 0x66 0x0f 0x72 11/6. */
3392FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
3393
3394
3395/** Opcode 0x0f 0x72. */
3396FNIEMOP_DEF(iemOp_Grp13)
3397{
3398 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3399 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3400 return IEMOP_RAISE_INVALID_OPCODE();
3401 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3402 {
3403 case 0: case 1: case 3: case 5: case 7:
3404 return IEMOP_RAISE_INVALID_OPCODE();
3405 case 2:
3406 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3407 {
3408 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
3409 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
3410 default: return IEMOP_RAISE_INVALID_OPCODE();
3411 }
3412 case 4:
3413 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3414 {
3415 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
3416 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
3417 default: return IEMOP_RAISE_INVALID_OPCODE();
3418 }
3419 case 6:
3420 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3421 {
3422 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
3423 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
3424 default: return IEMOP_RAISE_INVALID_OPCODE();
3425 }
3426 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3427 }
3428}
3429
3430
3431/** Opcode 0x0f 0x73 11/2. */
3432FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3433
3434/** Opcode 0x66 0x0f 0x73 11/2. */
3435FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);
3436
3437/** Opcode 0x66 0x0f 0x73 11/3. */
3438FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT
3439
3440/** Opcode 0x0f 0x73 11/6. */
3441FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3442
3443/** Opcode 0x66 0x0f 0x73 11/6. */
3444FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);
3445
3446/** Opcode 0x66 0x0f 0x73 11/7. */
3447FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
3448
3449
3450/** Opcode 0x0f 0x73. */
3451FNIEMOP_DEF(iemOp_Grp14)
3452{
3453 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3454 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3455 return IEMOP_RAISE_INVALID_OPCODE();
3456 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3457 {
3458 case 0: case 1: case 4: case 5:
3459 return IEMOP_RAISE_INVALID_OPCODE();
3460 case 2:
3461 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3462 {
3463 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
3464 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
3465 default: return IEMOP_RAISE_INVALID_OPCODE();
3466 }
3467 case 3:
3468 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3469 {
3470 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
3471 default: return IEMOP_RAISE_INVALID_OPCODE();
3472 }
3473 case 6:
3474 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3475 {
3476 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
3477 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
3478 default: return IEMOP_RAISE_INVALID_OPCODE();
3479 }
3480 case 7:
3481 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3482 {
3483 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
3484 default: return IEMOP_RAISE_INVALID_OPCODE();
3485 }
3486 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3487 }
3488}
3489
3490
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxx    mm1, mm2/mem64
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 and MMX cpuid checks.
 *
 * The mandatory-prefix value selects the decode: 0x66 = SSE2 (full 128-bit
 * operation), none = MMX (64-bit operation), REPZ/REPNZ = invalid opcode.
 * The destination register is both read and written (full-full-to-full).
 *
 * @param   pImpl   Pointer to the instruction implementation table; pfnU128
 *                  is used for the SSE2 form, pfnU64 for the MMX form.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* 16-byte alignment is enforced on the 128-bit memory operand. */
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            /* REPZ/REPNZ prefixed forms are undefined for these opcodes. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3596
3597
/** Opcode 0x0f 0x74 - pcmpeqb Pq,Qq (MMX); with 0x66 prefix: pcmpeqb Vdq,Wdq (SSE2).
 *  Packed byte equality compare; dispatches to the common full-full worker. */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
{
    IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}
3604
3605
/** Opcode 0x0f 0x75 - pcmpeqw Pq,Qq (MMX); with 0x66 prefix: pcmpeqw Vdq,Wdq (SSE2).
 *  Packed word equality compare; dispatches to the common full-full worker. */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
{
    IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}
3612
3613
/** Opcode 0x0f 0x76 - pcmpeqd Pq,Qq (MMX); with 0x66 prefix: pcmpeqd Vdq,Wdq (SSE2).
 *  Packed doubleword equality compare; dispatches to the common full-full worker.
 *  NOTE(review): the function name reads 'pcmped' (missing 'q') -- typo only;
 *  cannot be renamed here since the opcode table references it. */
FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
{
    IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}
3620
3621
3622/** Opcode 0x0f 0x77 - emms vzeroupperv vzeroallv */
3623FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
3624/* Opcode 0x66 0x0f 0x77 - invalid */
3625/* Opcode 0xf3 0x0f 0x77 - invalid */
3626/* Opcode 0xf2 0x0f 0x77 - invalid */
3627
3628/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3629FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3630/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3631FNIEMOP_STUB(iemOp_AmdGrp17);
3632/* Opcode 0xf3 0x0f 0x78 - invalid */
3633/* Opcode 0xf2 0x0f 0x78 - invalid */
3634
3635/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3636FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3637/* Opcode 0x66 0x0f 0x79 - invalid */
3638/* Opcode 0xf3 0x0f 0x79 - invalid */
3639/* Opcode 0xf2 0x0f 0x79 - invalid */
3640
3641/* Opcode 0x0f 0x7a - invalid */
3642/* Opcode 0x66 0x0f 0x7a - invalid */
3643/* Opcode 0xf3 0x0f 0x7a - invalid */
3644/* Opcode 0xf2 0x0f 0x7a - invalid */
3645
3646/* Opcode 0x0f 0x7b - invalid */
3647/* Opcode 0x66 0x0f 0x7b - invalid */
3648/* Opcode 0xf3 0x0f 0x7b - invalid */
3649/* Opcode 0xf2 0x0f 0x7b - invalid */
3650
3651/* Opcode 0x0f 0x7c - invalid */
3652/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
3653FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3654/* Opcode 0xf3 0x0f 0x7c - invalid */
3655/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
3656FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3657
3658/* Opcode 0x0f 0x7d - invalid */
3659/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
3660FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3661/* Opcode 0xf3 0x0f 0x7d - invalid */
3662/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
3663FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3664
3665
/** Opcode 0x0f 0x7e - movd/movq Ed/Eq,Pd/Pq (MMX); with 0x66 prefix:
 *  movd/movq Ed/Eq,Vd/Vq (SSE2).
 *
 * Stores the low 32 bits (or, with REX.W, 64 bits) of an MMX or XMM register
 * to a general register or memory.
 * NOTE(review): the function name also mentions the 0xf3 form (movq Vq,Wq),
 * but no REPZ case is handled below -- it falls into the default and raises
 * \#UD; presumably a not-yet-implemented form -- confirm.
 */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
            else
                IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, XMM */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], XMM */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* NOTE(review): cbImm=1, but 0x7e has no immediate byte; this
                   would skew RIP-relative addressing -- confirm (cf. 0x7f). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                    IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                    IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
            else
                IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, MMX */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], MMX */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                /* NOTE(review): cbImm=1 without an immediate -- see above. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3782
3783
/** Opcode 0x0f 0x7f - movq Qq,Pq (MMX); with 0x66 prefix: movdqa Wdq,Vdq;
 *  with 0xf3 prefix: movdqu Wdq,Vdq (both SSE2).
 *
 * Full register stores to register or memory (the store counterpart of
 * 0x0f 0x6f).  The 0x66 form enforces 16-byte alignment of the memory
 * operand, the 0xf3 form does not.
 */
FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    bool fAligned = false;
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - the aligned and unaligned forms differ only in fAligned. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wdq,Vdq");
            else
                IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wdq,Vdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 0);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                /* Note: destination is the r/m encoded register here (store direction). */
                IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                                      ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory (store to memory).
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (fAligned)
                    IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
                else
                    IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");

            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory (store to memory).
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            /* REPNZ prefixed 0x0f 0x7f is undefined. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3883
3884
3885
/** Opcode 0x0f 0x80 - jo Jv: jump near (rel16/rel32) if overflow (OF=1). */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC(jo_Jv, "jo Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc encodings require a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: fetch a rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);     /* OF set -> take the jump. */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();           /* OF clear -> fall through. */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: fetch a rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);     /* OF set -> take the jump. */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();           /* OF clear -> fall through. */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3920
3921
/** Opcode 0x0f 0x81 - jno Jv: jump near (rel16/rel32) if not overflow (OF=0). */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC(jno_Jv, "jno Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc encodings require a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: fetch a rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: OF set -> fall through, OF clear -> jump. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: fetch a rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: OF set -> fall through, OF clear -> jump. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3956
3957
/** Opcode 0x0f 0x82 - jc/jb/jnae Jv: jump near (rel16/rel32) if carry (CF=1). */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc encodings require a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: fetch a rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);     /* CF set -> take the jump. */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();           /* CF clear -> fall through. */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: fetch a rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);     /* CF set -> take the jump. */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();           /* CF clear -> fall through. */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3992
3993
/** Opcode 0x0f 0x83 - jnc/jnb/jae Jv: jump near (rel16/rel32) if no carry (CF=0). */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc encodings require a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: fetch a rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: CF set -> fall through, CF clear -> jump. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: fetch a rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: CF set -> fall through, CF clear -> jump. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4028
4029
/** Opcode 0x0f 0x84 - je/jz Jv: jump near (rel16/rel32) if equal/zero (ZF=1). */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc encodings require a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: fetch a rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);     /* ZF set -> take the jump. */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();           /* ZF clear -> fall through. */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: fetch a rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);     /* ZF set -> take the jump. */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();           /* ZF clear -> fall through. */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4064
4065
/** Opcode 0x0f 0x85 - jne/jnz Jv: jump near (rel16/rel32) if not equal/not zero (ZF=0). */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc encodings require a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: fetch a rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: ZF set -> fall through, ZF clear -> jump. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: fetch a rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: ZF set -> fall through, ZF clear -> jump. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4100
4101
/** Opcode 0x0f 0x86 - jbe/jna Jv: jump near (rel16/rel32) if below or equal (CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc encodings require a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: fetch a rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);     /* CF or ZF set -> take the jump. */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();           /* Both clear -> fall through. */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: fetch a rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);     /* CF or ZF set -> take the jump. */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();           /* Both clear -> fall through. */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4136
4137
/** Opcode 0x0f 0x87 - jnbe/ja Jv: jump near (rel16/rel32) if above (CF=0 and ZF=0). */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc encodings require a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: fetch a rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: CF or ZF set -> fall through, both clear -> jump. */
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: fetch a rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: CF or ZF set -> fall through, both clear -> jump. */
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4172
4173
/** Opcode 0x0f 0x88 - js Jv: jump near (rel16/rel32) if sign (SF=1). */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC(js_Jv, "js Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc encodings require a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: fetch a rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);     /* SF set -> take the jump. */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();           /* SF clear -> fall through. */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: fetch a rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);     /* SF set -> take the jump. */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();           /* SF clear -> fall through. */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4208
4209
/** Opcode 0x0f 0x89 - jns Jv: jump near (rel16/rel32) if not sign (SF=0). */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC(jns_Jv, "jns Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc encodings require a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: fetch a rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: SF set -> fall through, SF clear -> jump. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: fetch a rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: SF set -> fall through, SF clear -> jump. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4244
4245
/** Opcode 0x0f 0x8a - jp Jv: jump near (rel16/rel32) if parity (PF=1). */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC(jp_Jv, "jp Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc encodings require a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: fetch a rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);     /* PF set -> take the jump. */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();           /* PF clear -> fall through. */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: fetch a rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);     /* PF set -> take the jump. */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();           /* PF clear -> fall through. */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4280
4281
/** Opcode 0x0f 0x8b - jnp Jv: jump near (rel16/rel32) if not parity (PF=0). */
FNIEMOP_DEF(iemOp_jnp_Jv)
{
    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc encodings require a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: fetch a rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: PF set -> fall through, PF clear -> jump. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: fetch a rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: PF set -> fall through, PF clear -> jump. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4316
4317
/** Opcode 0x0f 0x8c - jl/jnge Jv: jump near (rel16/rel32) if less (SF != OF). */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc encodings require a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: fetch a rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);     /* SF != OF -> take the jump. */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();           /* SF == OF -> fall through. */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: fetch a rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);     /* SF != OF -> take the jump. */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();           /* SF == OF -> fall through. */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4352
4353
/** Opcode 0x0f 0x8d - jnl/jge Jv: jump near (rel16/rel32) if greater or equal (SF == OF). */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc encodings require a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: fetch a rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: SF != OF -> fall through, SF == OF -> jump. */
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: fetch a rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: SF != OF -> fall through, SF == OF -> jump. */
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4388
4389
/** Opcode 0x0f 0x8e - jle/jng Jv: jump near (rel16/rel32) if less or equal (ZF=1 or SF != OF). */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc encodings require a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: fetch a rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);     /* ZF set or SF != OF -> take the jump. */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();           /* Otherwise fall through. */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: fetch a rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);     /* ZF set or SF != OF -> take the jump. */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();           /* Otherwise fall through. */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4424
4425
/** Opcode 0x0f 0x8f - jnle/jg Jv: jump near (rel16/rel32) if greater (ZF=0 and SF == OF). */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc encodings require a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: fetch a rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: ZF set or SF != OF -> fall through, otherwise jump. */
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: fetch a rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: ZF set or SF != OF -> fall through, otherwise jump. */
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4460
4461
/** Opcode 0x0f 0x90 - seto Eb: set byte to 1 if overflow (OF=1), else 0. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
    IEMOP_HLP_MIN_386();                /* Setcc encodings require a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: store 1/0 into the byte register selected by r/m + REX.B. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: store 1/0 into the byte at the effective address. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4502
4503
/** Opcode 0x0f 0x91 - setno Eb: set byte to 1 if not overflow (OF=0), else 0. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC(setno_Eb, "setno Eb");
    IEMOP_HLP_MIN_386();                /* Setcc encodings require a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: inverted condition - OF set stores 0, OF clear stores 1. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: inverted condition - OF set stores 0, OF clear stores 1. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4544
4545
/** Opcode 0x0f 0x92 - setc Eb: set byte to 1 if carry (CF=1), else 0. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC(setc_Eb, "setc Eb");
    IEMOP_HLP_MIN_386();                /* Setcc encodings require a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: store 1/0 into the byte register selected by r/m + REX.B. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: store 1/0 into the byte at the effective address. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4586
4587
/** Opcode 0x0f 0x93 - setnc Eb: set byte to 1 if no carry (CF=0), else 0. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
    IEMOP_HLP_MIN_386();                /* Setcc encodings require a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: inverted condition - CF set stores 0, CF clear stores 1. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: inverted condition - CF set stores 0, CF clear stores 1. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4628
4629
/** Opcode 0x0f 0x94 - sete Eb: set byte to 1 if equal/zero (ZF=1), else 0. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC(sete_Eb, "sete Eb");
    IEMOP_HLP_MIN_386();                /* Setcc encodings require a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: store 1/0 into the byte register selected by r/m + REX.B. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: store 1/0 into the byte at the effective address. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4670
4671
/** Opcode 0x0f 0x95 - setne Eb: set byte to 1 if not equal/not zero (ZF=0), else 0. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC(setne_Eb, "setne Eb");
    IEMOP_HLP_MIN_386();                /* Setcc encodings require a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: inverted condition - ZF set stores 0, ZF clear stores 1. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: inverted condition - ZF set stores 0, ZF clear stores 1. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4712
4713
/** Opcode 0x0f 0x96 - setbe Eb: set byte to 1 if below or equal (CF=1 or ZF=1), else 0. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
    IEMOP_HLP_MIN_386();                /* Setcc encodings require a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: store 1/0 into the byte register selected by r/m + REX.B. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: store 1/0 into the byte at the effective address. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4754
4755
/** Opcode 0x0f 0x97 - setnbe Eb: set byte to 1 if above (CF=0 and ZF=0), else 0. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
    IEMOP_HLP_MIN_386();                /* Setcc encodings require a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: inverted condition - CF or ZF set stores 0, both clear stores 1. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: inverted condition - CF or ZF set stores 0, both clear stores 1. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4796
4797
/** Opcode 0x0f 0x98 - sets Eb: set byte to 1 if sign (SF=1), else 0. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC(sets_Eb, "sets Eb");
    IEMOP_HLP_MIN_386();                /* Setcc encodings require a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: store 1/0 into the byte register selected by r/m + REX.B. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: store 1/0 into the byte at the effective address. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4838
4839
/** Opcode 0x0f 0x99 - setns Eb: set byte to 1 if not sign (SF=0), else 0. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC(setns_Eb, "setns Eb");
    IEMOP_HLP_MIN_386();                /* Setcc encodings require a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: inverted condition - SF set stores 0, SF clear stores 1. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: inverted condition - SF set stores 0, SF clear stores 1. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4880
4881
/** Opcode 0x0f 0x9a - setp Eb: set byte to 1 if parity (PF=1), else 0. */
FNIEMOP_DEF(iemOp_setp_Eb)
{
    IEMOP_MNEMONIC(setp_Eb, "setp Eb");
    IEMOP_HLP_MIN_386();                /* Setcc encodings require a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: store 1/0 into the byte register selected by r/m + REX.B. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: store 1/0 into the byte at the effective address. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4922
4923
/** Opcode 0x0f 0x9b - setnp Eb: set byte to 1 if not parity (PF=0), else 0. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
    IEMOP_HLP_MIN_386();                /* Setcc encodings require a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: inverted condition - PF set stores 0, PF clear stores 1. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: inverted condition - PF set stores 0, PF clear stores 1. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4964
4965
/** Opcode 0x0f 0x9c - setl Eb: set byte to 1 if less (SF != OF), else 0. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC(setl_Eb, "setl Eb");
    IEMOP_HLP_MIN_386();                /* Setcc encodings require a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: store 1/0 into the byte register selected by r/m + REX.B. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: store 1/0 into the byte at the effective address. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5006
5007
/**
 * @opcode 0x0f 0x9d - setnl Eb (set byte if not less / greater-or-equal, signed).
 *
 * Stores 1 in the byte destination when SF == OF, otherwise 0 (the exact
 * inverse of setl).  Handles both the register and the memory destination
 * encodings.  386+.
 */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            /* SF != OF means "less", so the not-less result is 0. */
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5048
5049
/**
 * @opcode 0x0f 0x9e - setle Eb (set byte if less-or-equal, signed).
 *
 * Stores 1 in the byte destination when ZF is set or SF != OF, otherwise 0.
 * Handles both the register and the memory destination encodings.  386+.
 */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC(setle_Eb, "setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5090
5091
/**
 * @opcode 0x0f 0x9f - setnle Eb (set byte if not less-or-equal / greater, signed).
 *
 * Stores 0 in the byte destination when ZF is set or SF != OF, otherwise 1
 * (the exact inverse of setle).  Handles both the register and the memory
 * destination encodings.  386+.
 */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5132
5133
5134/**
5135 * Common 'push segment-register' helper.
5136 */
5137FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5138{
5139 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5140 if (iReg < X86_SREG_FS)
5141 IEMOP_HLP_NO_64BIT();
5142 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5143
5144 switch (pVCpu->iem.s.enmEffOpSize)
5145 {
5146 case IEMMODE_16BIT:
5147 IEM_MC_BEGIN(0, 1);
5148 IEM_MC_LOCAL(uint16_t, u16Value);
5149 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5150 IEM_MC_PUSH_U16(u16Value);
5151 IEM_MC_ADVANCE_RIP();
5152 IEM_MC_END();
5153 break;
5154
5155 case IEMMODE_32BIT:
5156 IEM_MC_BEGIN(0, 1);
5157 IEM_MC_LOCAL(uint32_t, u32Value);
5158 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
5159 IEM_MC_PUSH_U32_SREG(u32Value);
5160 IEM_MC_ADVANCE_RIP();
5161 IEM_MC_END();
5162 break;
5163
5164 case IEMMODE_64BIT:
5165 IEM_MC_BEGIN(0, 1);
5166 IEM_MC_LOCAL(uint64_t, u64Value);
5167 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5168 IEM_MC_PUSH_U64(u64Value);
5169 IEM_MC_ADVANCE_RIP();
5170 IEM_MC_END();
5171 break;
5172 }
5173
5174 return VINF_SUCCESS;
5175}
5176
5177
5178/** Opcode 0x0f 0xa0. */
5179FNIEMOP_DEF(iemOp_push_fs)
5180{
5181 IEMOP_MNEMONIC(push_fs, "push fs");
5182 IEMOP_HLP_MIN_386();
5183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5184 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5185}
5186
5187
/**
 * @opcode 0x0f 0xa1 - pop fs.
 *
 * 386+.  Deferred to the C implementation since popping a segment register
 * involves descriptor loading and can fault.
 */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC(pop_fs, "pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
}
5196
5197
/**
 * @opcode 0x0f 0xa2 - cpuid.
 *
 * Deferred to the C implementation (reads the guest CPUID leaves).
 */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC(cpuid, "cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
5206
5207
5208/**
5209 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5210 * iemOp_bts_Ev_Gv.
5211 */
5212FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5213{
5214 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5215 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5216
5217 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5218 {
5219 /* register destination. */
5220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5221 switch (pVCpu->iem.s.enmEffOpSize)
5222 {
5223 case IEMMODE_16BIT:
5224 IEM_MC_BEGIN(3, 0);
5225 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5226 IEM_MC_ARG(uint16_t, u16Src, 1);
5227 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5228
5229 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5230 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5231 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5232 IEM_MC_REF_EFLAGS(pEFlags);
5233 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5234
5235 IEM_MC_ADVANCE_RIP();
5236 IEM_MC_END();
5237 return VINF_SUCCESS;
5238
5239 case IEMMODE_32BIT:
5240 IEM_MC_BEGIN(3, 0);
5241 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5242 IEM_MC_ARG(uint32_t, u32Src, 1);
5243 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5244
5245 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5246 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5247 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5248 IEM_MC_REF_EFLAGS(pEFlags);
5249 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5250
5251 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5252 IEM_MC_ADVANCE_RIP();
5253 IEM_MC_END();
5254 return VINF_SUCCESS;
5255
5256 case IEMMODE_64BIT:
5257 IEM_MC_BEGIN(3, 0);
5258 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5259 IEM_MC_ARG(uint64_t, u64Src, 1);
5260 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5261
5262 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5263 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5264 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5265 IEM_MC_REF_EFLAGS(pEFlags);
5266 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5267
5268 IEM_MC_ADVANCE_RIP();
5269 IEM_MC_END();
5270 return VINF_SUCCESS;
5271
5272 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5273 }
5274 }
5275 else
5276 {
5277 /* memory destination. */
5278
5279 uint32_t fAccess;
5280 if (pImpl->pfnLockedU16)
5281 fAccess = IEM_ACCESS_DATA_RW;
5282 else /* BT */
5283 fAccess = IEM_ACCESS_DATA_R;
5284
5285 /** @todo test negative bit offsets! */
5286 switch (pVCpu->iem.s.enmEffOpSize)
5287 {
5288 case IEMMODE_16BIT:
5289 IEM_MC_BEGIN(3, 2);
5290 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5291 IEM_MC_ARG(uint16_t, u16Src, 1);
5292 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5293 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5294 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5295
5296 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5297 if (pImpl->pfnLockedU16)
5298 IEMOP_HLP_DONE_DECODING();
5299 else
5300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5301 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5302 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5303 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5304 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5305 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5306 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5307 IEM_MC_FETCH_EFLAGS(EFlags);
5308
5309 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5310 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5311 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5312 else
5313 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5314 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5315
5316 IEM_MC_COMMIT_EFLAGS(EFlags);
5317 IEM_MC_ADVANCE_RIP();
5318 IEM_MC_END();
5319 return VINF_SUCCESS;
5320
5321 case IEMMODE_32BIT:
5322 IEM_MC_BEGIN(3, 2);
5323 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5324 IEM_MC_ARG(uint32_t, u32Src, 1);
5325 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5326 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5327 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5328
5329 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5330 if (pImpl->pfnLockedU16)
5331 IEMOP_HLP_DONE_DECODING();
5332 else
5333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5334 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5335 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5336 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5337 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5338 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5339 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5340 IEM_MC_FETCH_EFLAGS(EFlags);
5341
5342 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5343 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5344 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5345 else
5346 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5347 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5348
5349 IEM_MC_COMMIT_EFLAGS(EFlags);
5350 IEM_MC_ADVANCE_RIP();
5351 IEM_MC_END();
5352 return VINF_SUCCESS;
5353
5354 case IEMMODE_64BIT:
5355 IEM_MC_BEGIN(3, 2);
5356 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5357 IEM_MC_ARG(uint64_t, u64Src, 1);
5358 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5359 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5360 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5361
5362 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5363 if (pImpl->pfnLockedU16)
5364 IEMOP_HLP_DONE_DECODING();
5365 else
5366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5367 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5368 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5369 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5370 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5371 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5372 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5373 IEM_MC_FETCH_EFLAGS(EFlags);
5374
5375 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5376 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5377 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5378 else
5379 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5380 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5381
5382 IEM_MC_COMMIT_EFLAGS(EFlags);
5383 IEM_MC_ADVANCE_RIP();
5384 IEM_MC_END();
5385 return VINF_SUCCESS;
5386
5387 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5388 }
5389 }
5390}
5391
5392
/**
 * @opcode 0x0f 0xa3 - bt Ev,Gv.
 *
 * 386+.  Read-only bit test; see iemOpCommonBit_Ev_Gv.
 */
FNIEMOP_DEF(iemOp_bt_Ev_Gv)
{
    IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
}
5400
5401
5402/**
5403 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5404 */
5405FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5406{
5407 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5408 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5409
5410 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5411 {
5412 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5414
5415 switch (pVCpu->iem.s.enmEffOpSize)
5416 {
5417 case IEMMODE_16BIT:
5418 IEM_MC_BEGIN(4, 0);
5419 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5420 IEM_MC_ARG(uint16_t, u16Src, 1);
5421 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5422 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5423
5424 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5425 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5426 IEM_MC_REF_EFLAGS(pEFlags);
5427 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5428
5429 IEM_MC_ADVANCE_RIP();
5430 IEM_MC_END();
5431 return VINF_SUCCESS;
5432
5433 case IEMMODE_32BIT:
5434 IEM_MC_BEGIN(4, 0);
5435 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5436 IEM_MC_ARG(uint32_t, u32Src, 1);
5437 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5438 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5439
5440 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5441 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5442 IEM_MC_REF_EFLAGS(pEFlags);
5443 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5444
5445 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5446 IEM_MC_ADVANCE_RIP();
5447 IEM_MC_END();
5448 return VINF_SUCCESS;
5449
5450 case IEMMODE_64BIT:
5451 IEM_MC_BEGIN(4, 0);
5452 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5453 IEM_MC_ARG(uint64_t, u64Src, 1);
5454 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5455 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5456
5457 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5458 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5459 IEM_MC_REF_EFLAGS(pEFlags);
5460 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5461
5462 IEM_MC_ADVANCE_RIP();
5463 IEM_MC_END();
5464 return VINF_SUCCESS;
5465
5466 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5467 }
5468 }
5469 else
5470 {
5471 switch (pVCpu->iem.s.enmEffOpSize)
5472 {
5473 case IEMMODE_16BIT:
5474 IEM_MC_BEGIN(4, 2);
5475 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5476 IEM_MC_ARG(uint16_t, u16Src, 1);
5477 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5478 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5479 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5480
5481 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5482 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5483 IEM_MC_ASSIGN(cShiftArg, cShift);
5484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5485 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5486 IEM_MC_FETCH_EFLAGS(EFlags);
5487 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5488 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5489
5490 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5491 IEM_MC_COMMIT_EFLAGS(EFlags);
5492 IEM_MC_ADVANCE_RIP();
5493 IEM_MC_END();
5494 return VINF_SUCCESS;
5495
5496 case IEMMODE_32BIT:
5497 IEM_MC_BEGIN(4, 2);
5498 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5499 IEM_MC_ARG(uint32_t, u32Src, 1);
5500 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5501 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5502 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5503
5504 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5505 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5506 IEM_MC_ASSIGN(cShiftArg, cShift);
5507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5508 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5509 IEM_MC_FETCH_EFLAGS(EFlags);
5510 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5511 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5512
5513 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5514 IEM_MC_COMMIT_EFLAGS(EFlags);
5515 IEM_MC_ADVANCE_RIP();
5516 IEM_MC_END();
5517 return VINF_SUCCESS;
5518
5519 case IEMMODE_64BIT:
5520 IEM_MC_BEGIN(4, 2);
5521 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5522 IEM_MC_ARG(uint64_t, u64Src, 1);
5523 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5524 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5525 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5526
5527 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5528 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5529 IEM_MC_ASSIGN(cShiftArg, cShift);
5530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5531 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5532 IEM_MC_FETCH_EFLAGS(EFlags);
5533 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5534 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5535
5536 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5537 IEM_MC_COMMIT_EFLAGS(EFlags);
5538 IEM_MC_ADVANCE_RIP();
5539 IEM_MC_END();
5540 return VINF_SUCCESS;
5541
5542 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5543 }
5544 }
5545}
5546
5547
5548/**
5549 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5550 */
5551FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5552{
5553 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5554 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5555
5556 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5557 {
5558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5559
5560 switch (pVCpu->iem.s.enmEffOpSize)
5561 {
5562 case IEMMODE_16BIT:
5563 IEM_MC_BEGIN(4, 0);
5564 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5565 IEM_MC_ARG(uint16_t, u16Src, 1);
5566 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5567 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5568
5569 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5570 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5571 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5572 IEM_MC_REF_EFLAGS(pEFlags);
5573 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5574
5575 IEM_MC_ADVANCE_RIP();
5576 IEM_MC_END();
5577 return VINF_SUCCESS;
5578
5579 case IEMMODE_32BIT:
5580 IEM_MC_BEGIN(4, 0);
5581 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5582 IEM_MC_ARG(uint32_t, u32Src, 1);
5583 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5584 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5585
5586 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5587 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5588 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5589 IEM_MC_REF_EFLAGS(pEFlags);
5590 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5591
5592 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5593 IEM_MC_ADVANCE_RIP();
5594 IEM_MC_END();
5595 return VINF_SUCCESS;
5596
5597 case IEMMODE_64BIT:
5598 IEM_MC_BEGIN(4, 0);
5599 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5600 IEM_MC_ARG(uint64_t, u64Src, 1);
5601 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5602 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5603
5604 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5605 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5606 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5607 IEM_MC_REF_EFLAGS(pEFlags);
5608 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5609
5610 IEM_MC_ADVANCE_RIP();
5611 IEM_MC_END();
5612 return VINF_SUCCESS;
5613
5614 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5615 }
5616 }
5617 else
5618 {
5619 switch (pVCpu->iem.s.enmEffOpSize)
5620 {
5621 case IEMMODE_16BIT:
5622 IEM_MC_BEGIN(4, 2);
5623 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5624 IEM_MC_ARG(uint16_t, u16Src, 1);
5625 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5626 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5627 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5628
5629 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5631 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5632 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5633 IEM_MC_FETCH_EFLAGS(EFlags);
5634 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5635 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5636
5637 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5638 IEM_MC_COMMIT_EFLAGS(EFlags);
5639 IEM_MC_ADVANCE_RIP();
5640 IEM_MC_END();
5641 return VINF_SUCCESS;
5642
5643 case IEMMODE_32BIT:
5644 IEM_MC_BEGIN(4, 2);
5645 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5646 IEM_MC_ARG(uint32_t, u32Src, 1);
5647 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5648 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5649 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5650
5651 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5653 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5654 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5655 IEM_MC_FETCH_EFLAGS(EFlags);
5656 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5657 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5658
5659 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5660 IEM_MC_COMMIT_EFLAGS(EFlags);
5661 IEM_MC_ADVANCE_RIP();
5662 IEM_MC_END();
5663 return VINF_SUCCESS;
5664
5665 case IEMMODE_64BIT:
5666 IEM_MC_BEGIN(4, 2);
5667 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5668 IEM_MC_ARG(uint64_t, u64Src, 1);
5669 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5670 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5671 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5672
5673 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5675 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5676 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5677 IEM_MC_FETCH_EFLAGS(EFlags);
5678 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5679 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5680
5681 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5682 IEM_MC_COMMIT_EFLAGS(EFlags);
5683 IEM_MC_ADVANCE_RIP();
5684 IEM_MC_END();
5685 return VINF_SUCCESS;
5686
5687 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5688 }
5689 }
5690}
5691
5692
5693
/**
 * @opcode 0x0f 0xa4 - shld Ev,Gv,Ib.
 *
 * 386+.  Shift left double with immediate count; see iemOpCommonShldShrd_Ib.
 */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}
5701
5702
/**
 * @opcode 0x0f 0xa5 - shld Ev,Gv,CL.
 *
 * 386+.  Shift left double with count in CL; see iemOpCommonShldShrd_CL.
 */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}
5710
5711
5712/** Opcode 0x0f 0xa8. */
5713FNIEMOP_DEF(iemOp_push_gs)
5714{
5715 IEMOP_MNEMONIC(push_gs, "push gs");
5716 IEMOP_HLP_MIN_386();
5717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5718 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5719}
5720
5721
/**
 * @opcode 0x0f 0xa9 - pop gs.
 *
 * 386+.  Deferred to the C implementation since popping a segment register
 * involves descriptor loading and can fault.
 */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC(pop_gs, "pop gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
}
5730
5731
/** Opcode 0x0f 0xaa - rsm (resume from system management mode); not implemented. */
FNIEMOP_STUB(iemOp_rsm);
//IEMOP_HLP_MIN_386();
5735
5736
/**
 * @opcode 0x0f 0xab - bts Ev,Gv.
 *
 * 386+.  Bit test and set, lockable; see iemOpCommonBit_Ev_Gv.
 */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
5744
5745
/**
 * @opcode 0x0f 0xac - shrd Ev,Gv,Ib.
 *
 * 386+.  Shift right double with immediate count; see iemOpCommonShldShrd_Ib.
 */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
5753
5754
/**
 * @opcode 0x0f 0xad - shrd Ev,Gv,CL.
 *
 * 386+.  Shift right double with count in CL; see iemOpCommonShldShrd_CL.
 */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
5762
5763
/**
 * Opcode 0x0f 0xae mem/0 - fxsave m512.
 *
 * Saves the x87/MMX/SSE state to a 512-byte memory area.  Raises \#UD when
 * the guest CPU profile does not report FXSR support; otherwise deferred to
 * the C implementation.
 */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxsave, "fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5782
5783
/**
 * Opcode 0x0f 0xae mem/1 - fxrstor m512.
 *
 * Restores the x87/MMX/SSE state from a 512-byte memory area.  Raises \#UD
 * when the guest CPU profile does not report FXSR support; otherwise
 * deferred to the C implementation.
 */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5802
5803
/** Opcode 0x0f 0xae mem/2 - ldmxcsr; not implemented yet. */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3 - stmxcsr; not implemented yet. */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4 - xsave; stubbed as \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5 - xrstor; stubbed as \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6 - xsaveopt; stubbed as \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7 - clflush; not implemented yet. */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5821
5822
/**
 * Opcode 0x0f 0xae 11b/5 - lfence.
 *
 * Raises \#UD when the guest CPU profile lacks SSE2.  Executes a real
 * LFENCE when the host CPU has SSE2, otherwise falls back to the
 * alternative memory-fence worker.
 */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(lfence, "lfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5841
5842
/**
 * Opcode 0x0f 0xae 11b/6 - mfence.
 *
 * Raises \#UD when the guest CPU profile lacks SSE2.  Executes a real
 * MFENCE when the host CPU has SSE2, otherwise falls back to the
 * alternative memory-fence worker.
 */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(mfence, "mfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5861
5862
/**
 * Opcode 0x0f 0xae 11b/7 - sfence.
 *
 * Raises \#UD when the guest CPU profile lacks SSE2 (note: real hardware
 * has SFENCE from SSE1 already — presumably intentional here; verify).
 * Executes a real SFENCE when the host CPU has SSE2, otherwise falls back
 * to the alternative memory-fence worker.
 */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(sfence, "sfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5881
5882
/** Opcode 0xf3 0x0f 0xae 11b/0 - rdfsbase; stubbed as \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1 - rdgsbase; stubbed as \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2 - wrfsbase; stubbed as \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3 - wrgsbase; stubbed as \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5894
5895
/**
 * @opcode 0x0f 0xae - group 15 dispatcher.
 *
 * Memory encodings (mod != 3) are selected by the reg field alone:
 * fxsave, fxrstor, ldmxcsr, stmxcsr, xsave, xrstor, xsaveopt, clflush.
 * Register encodings (mod == 3) are additionally qualified by the
 * mandatory prefix: no prefix selects the fences (lfence/mfence/sfence),
 * the F3 prefix selects rd/wr fs/gs base, and any other prefix
 * combination raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp15)
{
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0:
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every case above returns */

            case IEM_OP_PRF_REPZ:
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every case above returns */

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}
5955
5956
/**
 * Opcode 0x0f 0xaf.  IMUL Gv,Ev - two operand signed multiply.
 *
 * SF/ZF/AF/PF are architecturally undefined after IMUL, hence the
 * verification exemption below.  Decoding is shared with the other
 * reg,reg/mem binary operators.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
5965
5966
/**
 * Opcode 0x0f 0xb0.  CMPXCHG Eb,Gb - compare AL with the destination and
 * exchange with the source register byte on match.
 *
 * Both register and memory destination forms are implemented; the locked
 * helper variant is selected when a LOCK prefix is present (the memory
 * form permits LOCK, hence IEMOP_HLP_DONE_DECODING rather than the
 * no-lock-prefix variant).
 */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: operate directly on the guest register refs. */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map the byte RW, use a local copy of AL and
           write the (possibly updated) comparand back to AL afterwards. */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6025
/**
 * Opcode 0x0f 0xb1.  CMPXCHG Ev,Gv - compare {AX,EAX,RAX} with the
 * destination and exchange with the source register on match.
 *
 * Handles the 16/32/64-bit operand sizes for both register and memory
 * destinations; LOCK selects the locked helper variants.  On 32-bit hosts
 * (RT_ARCH_X86) the 64-bit source is passed by reference since a 64-bit
 * value cannot be passed in a single host register.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination forms. */
        IEMOP_HLP_DONE_DECODING();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* 32-bit register writes clear the upper halves in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination forms: map RW, compare against a local copy of
           the accumulator and store it back afterwards. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6219
6220
/**
 * Common worker for the far pointer loads LSS/LFS/LGS (0x0f 0xb2/0xb4/0xb5):
 * fetches an offset:selector pair from memory and hands it to
 * iemCImpl_load_SReg_Greg to load the segment and general registers.
 *
 * @param   iSegReg     The segment register to load (X86_SREG_XXX).
 * @param   bRm         The ModR/M byte; must denote a memory operand.
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* 16-bit offset followed by a 16-bit selector at +2. */
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            /* 32-bit offset followed by a 16-bit selector at +4. */
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            /* 64-bit offset followed by a 16-bit selector at +8. */
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6282
6283
/**
 * Opcode 0x0f 0xb2.  LSS Gv,Mp - load far pointer into SS:Gv.
 * Register operands are invalid; the memory form is handled by the common
 * far pointer load worker.
 */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}
6294
6295
/**
 * Opcode 0x0f 0xb3.  BTR Ev,Gv - bit test and reset.
 * Decoding is shared with the other Ev,Gv bit operations.
 */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}
6303
6304
/**
 * Opcode 0x0f 0xb4.  LFS Gv,Mp - load far pointer into FS:Gv.
 * Register operands are invalid; the memory form is handled by the common
 * far pointer load worker.
 */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}
6315
6316
/**
 * Opcode 0x0f 0xb5.  LGS Gv,Mp - load far pointer into GS:Gv.
 * Register operands are invalid; the memory form is handled by the common
 * far pointer load worker.
 */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}
6327
6328
/**
 * Opcode 0x0f 0xb6.  MOVZX Gv,Eb - zero-extend a byte into a 16/32/64-bit
 * general register, from either a register or a memory source.
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6422
6423
/**
 * Opcode 0x0f 0xb7.  MOVZX Gv,Ew - zero-extend a word into a 32/64-bit
 * general register (16-bit operand size is folded into the 32-bit path).
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        tests for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6492
6493
/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF); raises \#UD here. */
FNIEMOP_UD_STUB(iemOp_jmpe);
/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev - not implemented yet (stub). */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6498
6499
/**
 * Opcode 0x0f 0xb9 - group 10 (UD1).  Always raises \#UD; logged to aid
 * debugging guests that hit it.
 */
FNIEMOP_DEF(iemOp_Grp10)
{
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
6506
6507
/**
 * Opcode 0x0f 0xba - group 8: BT/BTS/BTR/BTC Ev,Ib.
 *
 * The reg field selects the operation (0..3 are invalid).  The immediate
 * bit offset is masked to the operand width (& 0x0f/0x1f/0x3f), so unlike
 * the Ev,Gv forms no displacement adjustment is needed.  Memory
 * destinations are mapped read-write except for BT (no locked helper),
 * which only needs read access.
 */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 2: case 3:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT has no locked variant and only reads; the others modify. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = the trailing Ib. */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = the trailing Ib. */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = the trailing Ib. */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

}
6679
6680
/**
 * Opcode 0x0f 0xbb.  BTC Ev,Gv - bit test and complement.
 * Decoding is shared with the other Ev,Gv bit operations.
 */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
6688
6689
/**
 * Opcode 0x0f 0xbc.  BSF Gv,Ev - bit scan forward.
 * All flags except ZF are architecturally undefined, hence the
 * verification exemption.  Decoding is shared with the reg,reg/mem
 * binary operators.
 */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}
6698
6699
/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev - not implemented yet (stub). */
FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
6702
6703
/**
 * Opcode 0x0f 0xbd.  BSR Gv,Ev - bit scan reverse.
 * All flags except ZF are architecturally undefined, hence the
 * verification exemption.  Decoding is shared with the reg,reg/mem
 * binary operators.
 */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
6712
6713
/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev - not implemented yet (stub). */
FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
6716
6717
/**
 * Opcode 0x0f 0xbe.  MOVSX Gv,Eb - sign-extend a byte into a 16/32/64-bit
 * general register, from either a register or a memory source.
 */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6811
6812
/** Opcode 0x0f 0xbf. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    /* MOVSX Gv,Ew: sign-extend a 16-bit register or memory operand into a
       32-bit (or, with REX.W, 64-bit) general purpose register.  386+. */
    IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            /* 16-/32-bit operand size: word -> sign-extended dword. */
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* 64-bit operand size: word -> sign-extended qword. */
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            /* Memory word -> sign-extended dword.  Effective address must be
               calculated before decoding completes (consumes SIB/disp bytes). */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* Memory word -> sign-extended qword. */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6881
6882
/** Opcode 0x0f 0xc0. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    /* XADD Eb,Gb: exchange Eb and Gb, then store the sum in Eb.  486+.
       LOCK prefix is valid for the memory form only. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();
    IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        /* Register form: both operands referenced in place; the assembly
           worker does the exchange-and-add and updates EFLAGS. */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* The register operand is worked on via a local copy so the original
           value can be written back to the guest register afterwards. */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t, u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* Pick the locked worker when a LOCK prefix is present. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS; /* Note: redundant with the return below, kept as-is. */
    }
    return VINF_SUCCESS;
}
6941
6942
/** Opcode 0x0f 0xc1. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    /* XADD Ev,Gv: exchange Ev and Gv, then store the sum in Ev.  486+.
       One case per effective operand size (16/32/64-bit); LOCK prefix is
       valid for the memory forms only. */
    IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit writes in 64-bit mode zero the high dword of both
                   registers involved. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* Same pattern for each width: map memory RW, take a local copy of
           the register operand, call the (optionally locked) worker, then
           commit memory, EFLAGS and the register write-back in that order. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7095
7096
7097/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
7098FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
7099/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
7100FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
7101/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
7102FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
7103/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
7104FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
7105
7106
/** Opcode 0x0f 0xc3. */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    /* MOVNTI My,Gy: non-temporal store of a 32/64-bit GPR to memory (SSE2).
       Emulated as a plain store; the non-temporal hint has no architectural
       effect here.  Register destination raises #UD. */
    IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* SSE2 feature check done after full decode so the #UD has
                   the right instruction length. */
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form. */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
7160/* Opcode 0x66 0x0f 0xc3 - invalid */
7161/* Opcode 0xf3 0x0f 0xc3 - invalid */
7162/* Opcode 0xf2 0x0f 0xc3 - invalid */
7163
7164/** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */
7165FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7166/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
7167FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
7168/* Opcode 0xf3 0x0f 0xc4 - invalid */
7169/* Opcode 0xf2 0x0f 0xc4 - invalid */
7170
7171/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
7172FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
7173/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
7174FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
7175/* Opcode 0xf3 0x0f 0xc5 - invalid */
7176/* Opcode 0xf2 0x0f 0xc5 - invalid */
7177
7178/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
7179FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
7180/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
7181FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
7182/* Opcode 0xf3 0x0f 0xc6 - invalid */
7183/* Opcode 0xf2 0x0f 0xc6 - invalid */
7184
7185
/** Opcode 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    /* CMPXCHG8B Mq: compare EDX:EAX with m64; if equal, store ECX:EBX into
       m64 and set ZF, otherwise load m64 into EDX:EAX and clear ZF.  The
       register pairs are passed to the worker as RTUINT64U locals. */
    IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Gather the comparand (EDX:EAX) and replacement (ECX:EBX) pairs. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    /* LOCK prefix selects the atomic worker variant. */
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* On mismatch (ZF clear) the worker left the memory value in the
       EDX:EAX local; write it back to the guest registers. */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7230
7231
/** Opcode REX.W 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
{
    /* CMPXCHG16B Mdq: 128-bit compare-and-exchange of RDX:RAX vs m128,
       replacement RCX:RBX.  Requires the CX16 CPUID feature (#UD otherwise)
       and a 16-byte aligned operand (#GP(0) otherwise). */
    IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
    {
#if 0
        RT_NOREF(bRm);
        IEMOP_BITCH_ABOUT_STUB();
        return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#else
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
        IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
        IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
        IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
        IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

        /* Comparand pair RDX:RAX. */
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
        IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);

        /* Replacement pair RCX:RBX. */
        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
        IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);

        IEM_MC_FETCH_EFLAGS(EFlags);
# ifdef RT_ARCH_AMD64
        /* Use the native cmpxchg16b when the host CPU has it. */
        if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
        {
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        }
        else
# endif
        {
            /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
               accesses and not all all atomic, which works fine on in UNI CPU guest
               configuration (ignoring DMA). If guest SMP is active we have no choice
               but to use a rendezvous callback here. Sigh. */
            if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
            {
                IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
                /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
            }
        }

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        /* ZF clear => mismatch: write the memory value back into RDX:RAX. */
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
            IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
        IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();

        IEM_MC_END();
        return VINF_SUCCESS;
#endif
    }
    Log(("cmpxchg16b -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
7305
7306
7307/** Opcode 0x0f 0xc7 11/6. */
7308FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
7309
7310/** Opcode 0x0f 0xc7 !11/6. */
7311FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
7312
7313/** Opcode 0x66 0x0f 0xc7 !11/6. */
7314FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
7315
7316/** Opcode 0xf3 0x0f 0xc7 !11/6. */
7317FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
7318
7319/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
7320FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7321
7322
/** Opcode 0x0f 0xc7. */
FNIEMOP_DEF(iemOp_Grp9)
{
    /* Group 9 dispatcher, keyed on the ModRM reg field:
       /1 = cmpxchg8b/16b (16b when REX.W), /6 = rdrand (reg form) or
       vmptrld/vmclear/vmxon by prefix (mem form), /7 = vmptrst.
       All other /r values raise #UD. */
    /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 2: case 3: case 4: case 5:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 1:
            /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
            /* Register form and 0x66/0xf3 prefixes are #UD for cmpxchg8b/16b. */
            if (   (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
                || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
                return IEMOP_RAISE_INVALID_OPCODE();
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
            return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
        case 6:
            /* Register operand: RDRAND.  Memory operand: VMX instructions,
               distinguished by the 0x66/0xf3 prefixes. */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
            {
                case 0:
                    return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
                case IEM_OP_PRF_SIZE_OP:
                    return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
                case IEM_OP_PRF_REPZ:
                    return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
                default:
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 7:
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
            {
                case 0:
                case IEM_OP_PRF_REPZ:
                    return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
                default:
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7366
7367
/**
 * Common 'bswap register' helper.
 *
 * Byte-swaps the general register selected by @a iReg for the current
 * effective operand size.  Shared by all the 0x0f 0xc8..0xcf handlers;
 * the caller has already folded REX.B into @a iReg.
 */
FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* 16-bit bswap: handled through a 32-bit reference; the worker
               defines the (architecturally undefined) 16-bit behaviour. */
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            /* 32-bit writes in 64-bit mode zero bits 63:32. */
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7407
7408
/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    /* BSWAP rAX/r8 (486+); REX.B selects r8. */
    IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
7419
7420
/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    /* BSWAP rCX/r9 (486+); REX.B selects r9. */
    IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
7428
7429
7430/** Opcode 0x0f 0xca. */
7431FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7432{
7433 IEMOP_MNEMONIC(bswap_rDX_r9, "bswap rDX/r9");
7434 IEMOP_HLP_MIN_486();
7435 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7436}
7437
7438
7439/** Opcode 0x0f 0xcb. */
7440FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7441{
7442 IEMOP_MNEMONIC(bswap_rBX_r9, "bswap rBX/r9");
7443 IEMOP_HLP_MIN_486();
7444 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7445}
7446
7447
/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    /* BSWAP rSP/r12 (486+); REX.B selects r12. */
    IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
7455
7456
/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    /* BSWAP rBP/r13 (486+); REX.B selects r13. */
    IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
7464
7465
/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    /* BSWAP rSI/r14 (486+); REX.B selects r14. */
    IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
7473
7474
/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    /* BSWAP rDI/r15 (486+); REX.B selects r15. */
    IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
7482
7483
7484/* Opcode 0x0f 0xd0 - invalid */
7485/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
7486FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
7487/* Opcode 0xf3 0x0f 0xd0 - invalid */
7488/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
7489FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
7490
7491/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7492FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7493/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
7494FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
7495/* Opcode 0xf3 0x0f 0xd1 - invalid */
7496/* Opcode 0xf2 0x0f 0xd1 - invalid */
7497
7498/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7499FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7500/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
7501FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
7502/* Opcode 0xf3 0x0f 0xd2 - invalid */
7503/* Opcode 0xf2 0x0f 0xd2 - invalid */
7504
7505/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7506FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7507/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
7508FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
7509/* Opcode 0xf3 0x0f 0xd3 - invalid */
7510/* Opcode 0xf2 0x0f 0xd3 - invalid */
7511
7512/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7513FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7514/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
7515FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
7516/* Opcode 0xf3 0x0f 0xd4 - invalid */
7517/* Opcode 0xf2 0x0f 0xd4 - invalid */
7518
7519/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7520FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7521/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
7522FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
7523/* Opcode 0xf3 0x0f 0xd5 - invalid */
7524/* Opcode 0xf2 0x0f 0xd5 - invalid */
7525
7526/* Opcode 0x0f 0xd6 - invalid */
7527/** Opcode 0x66 0x0f 0xd6 - vmovq Wq, Vq */
7528FNIEMOP_STUB(iemOp_vmovq_Wq_Vq);
7529/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7530FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7531/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7532FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7533#if 0
7534FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7535{
7536 /* Docs says register only. */
7537 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7538
7539 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7540 {
7541 case IEM_OP_PRF_SIZE_OP: /* SSE */
7542 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7543 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7544 IEM_MC_BEGIN(2, 0);
7545 IEM_MC_ARG(uint64_t *, pDst, 0);
7546 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7547 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7548 IEM_MC_PREPARE_SSE_USAGE();
7549 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7550 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7551 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7552 IEM_MC_ADVANCE_RIP();
7553 IEM_MC_END();
7554 return VINF_SUCCESS;
7555
7556 case 0: /* MMX */
7557 I E M O P _ M N E M O N I C(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7558 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7559 IEM_MC_BEGIN(2, 0);
7560 IEM_MC_ARG(uint64_t *, pDst, 0);
7561 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7562 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7563 IEM_MC_PREPARE_FPU_USAGE();
7564 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7565 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7566 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7567 IEM_MC_ADVANCE_RIP();
7568 IEM_MC_END();
7569 return VINF_SUCCESS;
7570
7571 default:
7572 return IEMOP_RAISE_INVALID_OPCODE();
7573 }
7574}
7575#endif
7576
7577
7578/** Opcode 0x0f 0xd7. */
7579FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
7580{
7581 /* Docs says register only. */
7582 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7583 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7584 return IEMOP_RAISE_INVALID_OPCODE();
7585
7586 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
7587 /** @todo testcase: Check that the instruction implicitly clears the high
7588 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
7589 * and opcode modifications are made to work with the whole width (not
7590 * just 128). */
7591 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7592 {
7593 case IEM_OP_PRF_SIZE_OP: /* SSE */
7594 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7595 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7596 IEM_MC_BEGIN(2, 0);
7597 IEM_MC_ARG(uint64_t *, pDst, 0);
7598 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7599 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7600 IEM_MC_PREPARE_SSE_USAGE();
7601 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7602 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7603 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7604 IEM_MC_ADVANCE_RIP();
7605 IEM_MC_END();
7606 return VINF_SUCCESS;
7607
7608 case 0: /* MMX */
7609 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7610 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7611 IEM_MC_BEGIN(2, 0);
7612 IEM_MC_ARG(uint64_t *, pDst, 0);
7613 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7614 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7615 IEM_MC_PREPARE_FPU_USAGE();
7616 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7617 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7618 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7619 IEM_MC_ADVANCE_RIP();
7620 IEM_MC_END();
7621 return VINF_SUCCESS;
7622
7623 default:
7624 return IEMOP_RAISE_INVALID_OPCODE();
7625 }
7626}
7627
7628
7629/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
7630FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
/*
 * Stub declarations for the MMX/SSE instructions in the 0x0f 0xd8 thru
 * 0x0f 0xe6 opcode range.  Each FNIEMOP_STUB expands to a decoder function
 * that asserts/fails until the instruction is properly implemented.
 * NOTE(review): in several symbol names the trailing 'Wx' operand was
 * truncated to 'W' (e.g. iemOp_vpsubusb_Vx_Hx_W); the decoder table uses the
 * same truncated names, so the references are consistent.
 */
/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xd8 - invalid */
/* Opcode 0xf2 0x0f 0xd8 - invalid */

/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xd9 - invalid */
/* Opcode 0xf2 0x0f 0xd9 - invalid */

/** Opcode 0x0f 0xda - pminub Pq, Qq */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xda - invalid */
/* Opcode 0xf2 0x0f 0xda - invalid */

/** Opcode 0x0f 0xdb - pand Pq, Qq */
FNIEMOP_STUB(iemOp_pand_Pq_Qq);
/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xdb - invalid */
/* Opcode 0xf2 0x0f 0xdb - invalid */

/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xdc - invalid */
/* Opcode 0xf2 0x0f 0xdc - invalid */

/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xdd - invalid */
/* Opcode 0xf2 0x0f 0xdd - invalid */

/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xde - invalid */
/* Opcode 0xf2 0x0f 0xde - invalid */

/** Opcode 0x0f 0xdf - pandn Pq, Qq */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xdf - invalid */
/* Opcode 0xf2 0x0f 0xdf - invalid */

/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xe0 - invalid */
/* Opcode 0xf2 0x0f 0xe0 - invalid */

/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xe1 - invalid */
/* Opcode 0xf2 0x0f 0xe1 - invalid */

/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xe2 - invalid */
/* Opcode 0xf2 0x0f 0xe2 - invalid */

/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xe3 - invalid */
/* Opcode 0xf2 0x0f 0xe3 - invalid */

/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xe4 - invalid */
/* Opcode 0xf2 0x0f 0xe4 - invalid */

/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xe5 - invalid */
/* Opcode 0xf2 0x0f 0xe5 - invalid */

/* Opcode 0x0f 0xe6 - invalid */
/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
7734
7735
7736/** Opcode 0x0f 0xe7. */
7737FNIEMOP_DEF(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq)
7738{
7739 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7740 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7741 {
7742 /*
7743 * Register, memory.
7744 */
7745/** @todo check when the REPNZ/Z bits kick in. Same as lock, probably... */
7746 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7747 {
7748
7749 case IEM_OP_PRF_SIZE_OP: /* SSE */
7750 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7751 IEM_MC_BEGIN(0, 2);
7752 IEM_MC_LOCAL(uint128_t, uSrc);
7753 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7754
7755 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7757 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7758 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7759
7760 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7761 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7762
7763 IEM_MC_ADVANCE_RIP();
7764 IEM_MC_END();
7765 break;
7766
7767 case 0: /* MMX */
7768 IEMOP_MNEMONIC(movntdq_Mdq_Vdq, "movntdq Mdq,Vdq");
7769 IEM_MC_BEGIN(0, 2);
7770 IEM_MC_LOCAL(uint64_t, uSrc);
7771 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7772
7773 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7774 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7775 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7776 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7777
7778 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7779 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7780
7781 IEM_MC_ADVANCE_RIP();
7782 IEM_MC_END();
7783 break;
7784
7785 default:
7786 return IEMOP_RAISE_INVALID_OPCODE();
7787 }
7788 }
7789 /* The register, register encoding is invalid. */
7790 else
7791 return IEMOP_RAISE_INVALID_OPCODE();
7792 return VINF_SUCCESS;
7793}
7794
7795
/*
 * Stub declarations for the MMX/SSE instructions in the 0x0f 0xe8 thru
 * 0x0f 0xee opcode range (saturating add/sub, min/max, por).
 */
/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xe8 - invalid */
/* Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xe9 - invalid */
/* Opcode 0xf2 0x0f 0xe9 - invalid */

/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xea - invalid */
/* Opcode 0xf2 0x0f 0xea - invalid */

/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_STUB(iemOp_por_Pq_Qq);
/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xeb - invalid */
/* Opcode 0xf2 0x0f 0xeb - invalid */

/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */

/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */
7844
7845
/**
 * Opcode 0x0f 0xef - pxor Pq,Qq (MMX) / pxor Vdq,Wdq (SSE2, 0x66 prefix).
 *
 * Delegates to the common MMX/SSE2 full-register worker, which selects the
 * 64-bit or 128-bit g_iemAImpl_pxor variant based on the operand-size prefix.
 */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
{
    IEMOP_MNEMONIC(pxor, "pxor");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
/* Opcode 0xf3 0x0f 0xef - invalid */
/* Opcode 0xf2 0x0f 0xef - invalid */

/*
 * Stub declarations for the MMX/SSE instructions in the 0x0f 0xf0 thru
 * 0x0f 0xfe opcode range (shifts, multiplies, packed add/sub, maskmov).
 */
/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */
/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);

/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
/** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xf3 - invalid */

/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
/* Opcode 0xf2 0x0f 0xf4 - invalid */

/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
/** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xf6 - invalid */

/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
/* Opcode 0xf2 0x0f 0xf7 - invalid */

/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
/** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
/* Opcode 0xf2 0x0f 0xf8 - invalid */

/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xf9 - invalid */

/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xfa - invalid */

/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
/** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
/* Opcode 0xf2 0x0f 0xfb - invalid */

/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
/** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xfc - invalid */

/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
/** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xfd - invalid */

/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
/* Opcode 0xf2 0x0f 0xfe - invalid */
7943
7944
/**
 * Opcode 0x0f 0xff - UD0.
 *
 * Always raises \#UD.  On Intel CPUs the instruction additionally consumes a
 * ModR/M byte (and its effective address is calculated) before the exception
 * is raised, which this emulation mirrors; for other vendors the \#UD is
 * raised without fetching any further bytes.
 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        /* Fetch and decode the ModR/M byte like real Intel hardware does. */
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        /* Effective address calculation may itself fault/return a strict status. */
        RTGCPTR GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
7962
7963
7964
/** Repeats a_fn four times. For decoding tables. */
#define IEMOP_X4(a_fn) a_fn, a_fn, a_fn, a_fn

/**
 * Two byte opcode (0x0f) decoder table.
 *
 * Four entries per opcode byte, indexed as (bOpcode * 4 + idxPrefix), where
 * the prefix index is: 0 = no prefix, 1 = 0x66, 2 = 0xf3, 3 = 0xf2.
 * IEMOP_X4 is used for opcodes whose handler ignores these prefixes.
 * Layout is sanity-checked at runtime by iemOp_2byteEscape (strict builds)
 * and at compile time by the AssertCompile following the table.
 */
IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
{
    /* no prefix, 066h prefix f3h prefix, f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
    /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
    /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
    /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd, iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
    /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
    /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
    /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
    /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
    /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
    /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
    /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ IEMOP_X4(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq),
    /* 0x69 */ IEMOP_X4(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq),
    /* 0x6a */ IEMOP_X4(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq),
    /* 0x6b */ IEMOP_X4(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq),
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ IEMOP_X4(iemOp_punpckhqdq_Vdq_Wdq),
    /* 0x6e */ IEMOP_X4(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey),
    /* 0x6f */ IEMOP_X4(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq),

    /* 0x70 */ IEMOP_X4(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib),
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq, iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq, iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq, iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
    /* 0x7e */ IEMOP_X4(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq),
    /* 0x7f */ IEMOP_X4(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq),

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ IEMOP_X4(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq),
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq, iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq, iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
/** @} */
8246
8247
8248/** @name One byte opcodes.
8249 *
8250 * @{
8251 */
8252
/** Opcode 0x00 - add Eb,Gb: byte ADD with register/memory destination. */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC(add_Eb_Gb, "add Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}


/** Opcode 0x01 - add Ev,Gv: word/dword/qword ADD with register/memory destination. */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC(add_Ev_Gv, "add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}


/** Opcode 0x02 - add Gb,Eb: byte ADD with register destination. */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC(add_Gb_Eb, "add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}


/** Opcode 0x03 - add Gv,Ev: word/dword/qword ADD with register destination. */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC(add_Gv_Ev, "add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}


/** Opcode 0x04 - add al,Ib: ADD immediate byte into AL. */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC(add_al_Ib, "add al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}


/** Opcode 0x05 - add rAX,Iz: ADD immediate into (r/e)AX. */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC(add_rAX_Iz, "add rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
8299
8300
/** Opcode 0x06 - push es. */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC(push_es, "push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}


/** Opcode 0x07 - pop es; raises \#UD in 64-bit mode. */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC(pop_es, "pop es");
    IEMOP_HLP_NO_64BIT(); /* POP ES is invalid in long mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
8317
8318
/** Opcode 0x08 - or Eb,Gb: byte OR with register/memory destination.
 *  AF is architecturally undefined after OR, hence the verification hint. */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC(or_Eb_Gb, "or Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}


/** Opcode 0x09 - or Ev,Gv: word/dword/qword OR with register/memory destination. */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC(or_Ev_Gv, "or Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}


/** Opcode 0x0a - or Gb,Eb: byte OR with register destination. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC(or_Gb_Eb, "or Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/** Opcode 0x0b - or Gv,Ev: word/dword/qword OR with register destination. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC(or_Gv_Ev, "or Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/** Opcode 0x0c - or al,Ib: OR immediate byte into AL. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC(or_al_Ib, "or al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/** Opcode 0x0d - or rAX,Iz: OR immediate into (r/e)AX. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC(or_rAX_Iz, "or rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}


/** Opcode 0x0e - push cs. */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC(push_cs, "push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
8379
8380
/** Opcode 0x0f - escape byte into the two-byte opcode map. */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#ifdef VBOX_STRICT
    /* One-shot sanity check (strict builds only): the two-byte map holds four
       entries per opcode, selected by idxPrefix; the 0x0f 0xbc row must be
       bsf/bsf/tzcnt/bsf, i.e. the 0xf3-prefixed slot (index 2) is TZCNT. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);

    /** @todo PUSH CS on 8086, undefined on 80186. */
    IEMOP_HLP_MIN_286();
    /* Dispatch: 4 slots per opcode byte, the prefix index selects the variant. */
    return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
}
8403
/** Opcode 0x10 - ADC Eb,Gb (byte; ModRM reg/mem is the destination). */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC(adc_Eb_Gb, "adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/** Opcode 0x11 - ADC Ev,Gv (word/dword/qword; ModRM reg/mem is the destination). */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC(adc_Ev_Gv, "adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/** Opcode 0x12 - ADC Gb,Eb (byte; general register is the destination). */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC(adc_Gb_Eb, "adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/** Opcode 0x13 - ADC Gv,Ev (word/dword/qword; general register is the destination). */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC(adc_Gv_Ev, "adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/** Opcode 0x14 - ADC AL,Ib (byte immediate into AL). */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC(adc_al_Ib, "adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/** Opcode 0x15 - ADC rAX,Iz (operand-size immediate into rAX). */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC(adc_rAX_Iz, "adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}


/** Opcode 0x16 - PUSH SS. */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC(push_ss, "push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
8458
8459
/** Opcode 0x17 - POP SS. Invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC(pop_ss, "pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    /* Defer to the C implementation worker, passing the current effective operand size. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
8468
8469
/** Opcode 0x18 - SBB Eb,Gb (byte; ModRM reg/mem is the destination). */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC(sbb_Eb_Gb, "sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/** Opcode 0x19 - SBB Ev,Gv (word/dword/qword; ModRM reg/mem is the destination). */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC(sbb_Ev_Gv, "sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/** Opcode 0x1a - SBB Gb,Eb (byte; general register is the destination). */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC(sbb_Gb_Eb, "sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1b - SBB Gv,Ev (word/dword/qword; general register is the destination). */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC(sbb_Gv_Ev, "sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1c - SBB AL,Ib (byte immediate into AL). */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC(sbb_al_Ib, "sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/** Opcode 0x1d - SBB rAX,Iz (operand-size immediate into rAX). */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC(sbb_rAX_Iz, "sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}


/** Opcode 0x1e - PUSH DS. */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC(push_ds, "push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
8524
8525
/** Opcode 0x1f - POP DS. Invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC(pop_ds, "pop ds");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    /* Defer to the C implementation worker, passing the current effective operand size. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
8534
8535
/** Opcode 0x20 - AND Eb,Gb (byte; ModRM reg/mem is the destination). */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC(and_Eb_Gb, "and Eb,Gb");
    /* AF is architecturally undefined after AND; exclude it from verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}


/** Opcode 0x21 - AND Ev,Gv (word/dword/qword; ModRM reg/mem is the destination). */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC(and_Ev_Gv, "and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}


/** Opcode 0x22 - AND Gb,Eb (byte; general register is the destination). */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC(and_Gb_Eb, "and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}


/** Opcode 0x23 - AND Gv,Ev (word/dword/qword; general register is the destination). */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC(and_Gv_Ev, "and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}


/** Opcode 0x24 - AND AL,Ib (byte immediate into AL). */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC(and_al_Ib, "and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}


/** Opcode 0x25 - AND rAX,Iz (operand-size immediate into rAX). */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC(and_rAX_Iz, "and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
8588
8589
/** Opcode 0x26 - ES segment override prefix. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    /* Record the prefix and make ES the effective segment, then decode on. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8600
8601
/** Opcode 0x27 - DAA. Invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC(daa_AL, "daa AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* OF is architecturally undefined after DAA; exclude it from verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
8611
8612
/** Opcode 0x28 - SUB Eb,Gb (byte; ModRM reg/mem is the destination). */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC(sub_Eb_Gb, "sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/** Opcode 0x29 - SUB Ev,Gv (word/dword/qword; ModRM reg/mem is the destination). */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC(sub_Ev_Gv, "sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/** Opcode 0x2a - SUB Gb,Eb (byte; general register is the destination). */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC(sub_Gb_Eb, "sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2b - SUB Gv,Ev (word/dword/qword; general register is the destination). */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC(sub_Gv_Ev, "sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2c - SUB AL,Ib (byte immediate from AL). */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC(sub_al_Ib, "sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/** Opcode 0x2d - SUB rAX,Iz (operand-size immediate from rAX). */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC(sub_rAX_Iz, "sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
8659
8660
/** Opcode 0x2e - CS segment override prefix. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    /* Record the prefix and make CS the effective segment, then decode on. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8671
8672
/** Opcode 0x2f - DAS. Invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC(das_AL, "das AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* OF is architecturally undefined after DAS; exclude it from verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
8682
8683
/** Opcode 0x30 - XOR Eb,Gb (byte; ModRM reg/mem is the destination). */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC(xor_Eb_Gb, "xor Eb,Gb");
    /* AF is architecturally undefined after XOR; exclude it from verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/** Opcode 0x31 - XOR Ev,Gv (word/dword/qword; ModRM reg/mem is the destination). */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC(xor_Ev_Gv, "xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/** Opcode 0x32 - XOR Gb,Eb (byte; general register is the destination). */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC(xor_Gb_Eb, "xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/** Opcode 0x33 - XOR Gv,Ev (word/dword/qword; general register is the destination). */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC(xor_Gv_Ev, "xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}


/** Opcode 0x34 - XOR AL,Ib (byte immediate into AL). */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC(xor_al_Ib, "xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}


/** Opcode 0x35 - XOR rAX,Iz (operand-size immediate into rAX). */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC(xor_rAX_Iz, "xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}
8736
8737
/** Opcode 0x36 - SS segment override prefix. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    /* Record the prefix and make SS the effective segment, then decode on. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x37 - AAA. Not implemented yet (stub). */
FNIEMOP_STUB(iemOp_aaa);
8752
8753
/** Opcode 0x38 - CMP Eb,Gb (byte; compares, only EFLAGS are written). */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}


/** Opcode 0x39 - CMP Ev,Gv (word/dword/qword). */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}


/** Opcode 0x3a - CMP Gb,Eb (byte, operands reversed). */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3b - CMP Gv,Ev (word/dword/qword, operands reversed). */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3c - CMP AL,Ib (byte immediate against AL). */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}


/** Opcode 0x3d - CMP rAX,Iz (operand-size immediate against rAX). */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
8800
8801
/** Opcode 0x3e - DS segment override prefix. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    /* Record the prefix and make DS the effective segment, then decode on. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x3f - AAS. Not implemented yet (stub). */
FNIEMOP_STUB(iemOp_aas);
8816
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * Dispatches on the effective operand size and applies the unary worker
 * (pImpl->pfnNormalU16/U32/U64) to the given general register by reference,
 * updating EFLAGS in place.
 *
 * @param   pImpl   The unary operation implementation table (assembly workers).
 * @param   iReg    The general register index (X86_GREG_XXX).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *,  pu16Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit register writes clear the high dword of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *,  pu64Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* Not reached in practice; kept for compilers (see the C4702 pragma at the top of the file). */
    return VINF_SUCCESS;
}
8861
8862
/** Opcode 0x40 - INC eAX; in 64-bit mode this is the plain REX prefix. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}


/** Opcode 0x41 - INC eCX; in 64-bit mode this is the REX.B prefix. */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB     = 1 << 3;    /* extends the ModRM r/m (base) field to r8-r15 */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}


/** Opcode 0x42 - INC eDX; in 64-bit mode this is the REX.X prefix. */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;    /* extends the SIB index field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}



/** Opcode 0x43 - INC eBX; in 64-bit mode this is the REX.BX prefix. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}


/** Opcode 0x44 - INC eSP; in 64-bit mode this is the REX.R prefix. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg   = 1 << 3;    /* extends the ModRM reg field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}


/** Opcode 0x45 - INC eBP; in 64-bit mode this is the REX.RB prefix. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}


/** Opcode 0x46 - INC eSI; in 64-bit mode this is the REX.RX prefix. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}


/** Opcode 0x47 - INC eDI; in 64-bit mode this is the REX.RBX prefix. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
9034
9035
/** Opcode 0x48 - DEC eAX; in 64-bit mode this is the REX.W prefix. */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu);   /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}


/** Opcode 0x49 - DEC eCX; in 64-bit mode this is the REX.BW prefix. */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}


/** Opcode 0x4a - DEC eDX; in 64-bit mode this is the REX.XW prefix. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}


/** Opcode 0x4b - DEC eBX; in 64-bit mode this is the REX.BXW prefix. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}


/** Opcode 0x4c - DEC eSP; in 64-bit mode this is the REX.RW prefix. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}


/** Opcode 0x4d - DEC eBP; in 64-bit mode this is the REX.RBW prefix. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}


/** Opcode 0x4e - DEC eSI; in 64-bit mode this is the REX.RXW prefix. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}


/** Opcode 0x4f - DEC eDI; in 64-bit mode this is the REX.RBXW prefix. */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
9214
9215
/**
 * Common 'push register' helper.
 *
 * In 64-bit mode REX.B extends the register index, the default operand size
 * is 64-bit, and a 0x66 prefix selects 16-bit (32-bit pushes are not
 * encodable).  Fetches the register value and pushes it at the effective
 * operand size.
 *
 * @param   iReg    The general register index (X86_GREG_XXX) before REX.B.
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
9261
9262
/** Opcode 0x50 - PUSH rAX (r8 with REX.B). */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}


/** Opcode 0x51 - PUSH rCX (r9 with REX.B). */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}


/** Opcode 0x52 - PUSH rDX (r10 with REX.B). */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}


/** Opcode 0x53 - PUSH rBX (r11 with REX.B). */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
9293
9294
/** Opcode 0x54 - PUSH rSP.
 * The 8086 pushes the value of SP *after* the decrement (SP-2), unlike
 * later CPUs which push the pre-decrement value; hence the special case. */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* NOTE(review): reaching this after the 8086 block relies on the MC macros
       (IEM_MC_ADVANCE_RIP/IEM_MC_END) terminating the flow - confirm against
       the macro definitions; the common path is the non-8086 behavior. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
9311
9312
/** Opcode 0x55 - PUSH rBP (r13 with REX.B). */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}


/** Opcode 0x56 - PUSH rSI (r14 with REX.B). */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}


/** Opcode 0x57 - PUSH rDI (r15 with REX.B). */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
9335
9336
/**
 * Common 'pop register' helper.
 *
 * In 64-bit mode REX.B extends the register index, the default operand size
 * is 64-bit, and a 0x66 prefix selects 16-bit.  Pops into the register by
 * reference at the effective operand size.  POP rSP has its own handler
 * (iemOp_pop_eSP) for the non-REX.B case.
 *
 * @param   iReg    The general register index (X86_GREG_XXX) before REX.B.
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            /* 32-bit register writes clear the high dword of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
9383
9384
/** Opcode 0x58 - POP rAX (r8 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}


/** Opcode 0x59 - POP rCX (r9 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}


/** Opcode 0x5a - POP rDX (r10 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}


/** Opcode 0x5b - POP rBX (r11 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
9415
9416
/** Opcode 0x5c - POP rSP.
 * Special-cased because popping into the stack pointer itself must not be
 * done through a reference that the pop's own SP update could interact with:
 * the value is popped into a local first and then stored. */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* With REX.B this is actually POP r12; the common helper handles it. */
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
9464
9465
/** Opcode 0x5d - POP rBP (r13 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}


/** Opcode 0x5e - POP rSI (r14 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}


/** Opcode 0x5f - POP rDI (r15 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
9488
9489
/** Opcode 0x60 - PUSHA/PUSHAD. Requires 80186+; invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    /* Defer to the 16- or 32-bit C implementation per the effective operand size. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
9501
9502
/** Opcode 0x61 - POPA/POPAD. Requires 80186+; invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_popa)
{
    IEMOP_MNEMONIC(popa, "popa");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    /* Defer to the 16- or 32-bit C implementation per the effective operand size. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
}
9514
9515
/** Opcode 0x62 - BOUND / EVEX escape. Not implemented yet (stub). */
FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
//         IEMOP_HLP_MIN_186();
9519
9520
/** Opcode 0x63 - ARPL Ew,Gw (non-64-bit modes; 0x63 is MOVSXD in 64-bit mode).
 * Requires 80286+ and protected mode (not real or V86 mode). */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: reference the 16-bit register and let the
           assembly worker adjust the RPL and EFLAGS.ZF in place. */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
        IEM_MC_ARG(uint16_t,        u16Src,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map the word read/write, run the worker on the
           mapped memory, then commit both the memory and the EFLAGS copy.
           Note the effective address is calculated before the decoding-done hook. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst,          0);
        IEM_MC_ARG(uint16_t,   u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
9570
9571
/** Opcode 0x63 - 64-bit mode: MOVSXD Gv,Ev (sign-extend dword to qword).
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        /* Fetch the 32-bit source sign-extended to 64 bits, then store the
           full qword into the destination register (REX extensions apply). */
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9613
9614
/** Opcode 0x64 - FS segment-override prefix. */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();    /* FS/GS prefixes are 386+. */

    /* Record the prefix and make FS the effective data segment. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg = X86_SREG_FS;

    /* Continue decoding with the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
9627
9628
/** Opcode 0x65 - GS segment-override prefix. */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();    /* FS/GS prefixes are 386+. */

    /* Record the prefix and make GS the effective data segment. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg = X86_SREG_GS;

    /* Continue decoding with the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
9641
9642
/** Opcode 0x66 - operand-size override prefix. */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    /* Record the prefix and recalculate the effective operand size. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    /* Continue decoding with the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
9660
9661
/** Opcode 0x67 - address-size override prefix. */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    /* Toggle the effective address size: 16<->32 in legacy modes,
       64->32 in long mode (16-bit addressing is not reachable there). */
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    /* Continue decoding with the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
9680
9681
/** Opcode 0x68 - push Iz (push immediate word/dword). */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();                /* Immediate push introduced with the 80186. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Pushes default to 64-bit operand size in long mode. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* The immediate stays 32-bit and is sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9726
9727
/** Opcode 0x69 - imul Gv,Ev,Iz (three-operand signed multiply). */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* SF/ZF/AF/PF are architecturally undefined after IMUL; only CF/OF are defined. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                /* The product is computed in a local and stored to the REG
                   register afterwards - source (R/M) and destination (REG)
                   registers differ for the three-operand form. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 2 = bytes of immediate following the ModR/M encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 4 = bytes of immediate following the ModR/M encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                /* The immediate stays 32-bit and is sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 4 = bytes of immediate following the ModR/M encoding (imm32). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
9887
9888
/** Opcode 0x6a - push Ib (push sign-extended byte immediate). */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();                /* Immediate push introduced with the 80186. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Pushes default to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0,0);
    /* The int8_t -> wider conversion performs the sign extension. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9915
9916
/** Opcode 0x6b - imul Gv,Ev,Ib (three-operand signed multiply, byte immediate). */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* SF/ZF/AF/PF are architecturally undefined after IMUL; only CF/OF are defined. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                /* The int8_t cast sign-extends the immediate to the operand size. */
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                /* The product is computed in a local and stored to the REG
                   register afterwards - source (R/M) and destination (REG)
                   registers differ for the three-operand form. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 1 = bytes of immediate following the ModR/M encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
10070
10071
/** Opcode 0x6c - ins Yb,DX (byte string input from port DX).
 * Both plain and REP-prefixed forms are deferred to C implementations
 * selected by effective address size.  The trailing 'false' argument is
 * presumably an I/O-permission-checked flag - confirm with the workers. */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10100
10101
/** Opcode 0x6d - ins Yv,DX (word/dword string input from port DX).
 * Dispatches on operand and address size to the C implementation workers.
 * Note that the 64-bit operand size shares the 32-bit workers (there is no
 * 64-bit INS). */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* Shares the 32-bit workers, see note above. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* Shares the 32-bit workers, see note above. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10162
10163
/** Opcode 0x6e - outs DX,Yb (byte string output to port DX).
 * Unlike INS, the source may carry a segment override, so the effective
 * segment is passed to the C implementation workers. */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10192
10193
/** Opcode 0x6f - outs DX,Yv (word/dword string output to port DX).
 * Dispatches on operand and address size; 64-bit operand size shares the
 * 32-bit workers (there is no 64-bit OUTS).  The source may carry a segment
 * override, so the effective segment is passed along. */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* Shares the 32-bit workers, see note above. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* Shares the 32-bit workers, see note above. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10254
10255
/** Opcode 0x70 - jo Jb: jump short if OF=1. */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10273
10274
/** Opcode 0x71 - jno Jb: jump short if OF=0.
 * Same flag test as jo, but with the taken path in the ELSE arm. */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10292
/** Opcode 0x72 - jc/jb/jnae Jb: jump short if CF=1. */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10310
10311
/** Opcode 0x73 - jnc/jnb/jae Jb: jump short if CF=0.
 * Same flag test as jc, but with the taken path in the ELSE arm. */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10329
10330
/** Opcode 0x74 - je/jz Jb: jump short if ZF=1. */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10348
10349
/** Opcode 0x75 - jne/jnz Jb: jump short if ZF=0.
 * Same flag test as je, but with the taken path in the ELSE arm. */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10367
10368
/** Opcode 0x76 - jbe/jna Jb: jump short if CF=1 or ZF=1. */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10386
10387
/** Opcode 0x77 - ja/jnbe Jb: jump short if CF=0 and ZF=0.
 * Same flag test as jbe, but with the taken path in the ELSE arm. */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10405
10406
/** Opcode 0x78 - js Jb: jump short if SF=1. */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10424
10425
/** Opcode 0x79 - jns Jb: jump short if SF=0.
 * Same flag test as js, but with the taken path in the ELSE arm. */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10443
10444
/** Opcode 0x7a - jp/jpe Jb: jump short if PF=1. */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10462
10463
/** Opcode 0x7b - jnp/jpo Jb: jump short if PF=0.
 * Same flag test as jp, but with the taken path in the ELSE arm. */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10481
10482
/** Opcode 0x7c - jl/jnge Jb: jump short if SF != OF. */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10500
10501
/** Opcode 0x7d - jnl/jge Jb: jump short if SF == OF.
 * Same flag test as jl, but with the taken path in the ELSE arm. */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10519
10520
/** Opcode 0x7e - jle/jng Jb: jump short if ZF=1 or SF != OF. */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10538
10539
/**
 * Opcode 0x7f - jnle/jg Jb.
 *
 * Jump short if greater (signed): taken when ZF is clear and SF == OF;
 * the inverse of opcode 0x7e, hence the swapped branch bodies.
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* "less or equal" is the not-taken path for jg. */
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10557
10558
/**
 * Opcode 0x80 - Group 1: add/or/adc/sbb/and/sub/xor/cmp Eb,Ib.
 *
 * The ModR/M reg field selects the operation; the worker function table is
 * looked up in g_apIemImplGrp1.  CMP (reg=7) has no locked worker, which is
 * what the pfnLockedU8 checks below key off for read-only memory access.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The switch only records the mnemonic for stats/logging purposes. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The effective address must be calculated before the immediate is
           fetched, as the displacement bytes precede it in the opcode stream. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        /* Only ops with a locked worker may legally carry a LOCK prefix. */
        if (pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING();
        else
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10628
10629
10630/** Opcode 0x81. */
10631FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
10632{
10633 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10634 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10635 {
10636 case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
10637 case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
10638 case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
10639 case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
10640 case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
10641 case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
10642 case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
10643 case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
10644 }
10645 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
10646
10647 switch (pVCpu->iem.s.enmEffOpSize)
10648 {
10649 case IEMMODE_16BIT:
10650 {
10651 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10652 {
10653 /* register target */
10654 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10656 IEM_MC_BEGIN(3, 0);
10657 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10658 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
10659 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10660
10661 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10662 IEM_MC_REF_EFLAGS(pEFlags);
10663 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10664
10665 IEM_MC_ADVANCE_RIP();
10666 IEM_MC_END();
10667 }
10668 else
10669 {
10670 /* memory target */
10671 uint32_t fAccess;
10672 if (pImpl->pfnLockedU16)
10673 fAccess = IEM_ACCESS_DATA_RW;
10674 else /* CMP, TEST */
10675 fAccess = IEM_ACCESS_DATA_R;
10676 IEM_MC_BEGIN(3, 2);
10677 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10678 IEM_MC_ARG(uint16_t, u16Src, 1);
10679 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10680 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10681
10682 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
10683 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10684 IEM_MC_ASSIGN(u16Src, u16Imm);
10685 if (pImpl->pfnLockedU16)
10686 IEMOP_HLP_DONE_DECODING();
10687 else
10688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10689 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10690 IEM_MC_FETCH_EFLAGS(EFlags);
10691 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10692 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10693 else
10694 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
10695
10696 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
10697 IEM_MC_COMMIT_EFLAGS(EFlags);
10698 IEM_MC_ADVANCE_RIP();
10699 IEM_MC_END();
10700 }
10701 break;
10702 }
10703
10704 case IEMMODE_32BIT:
10705 {
10706 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10707 {
10708 /* register target */
10709 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10711 IEM_MC_BEGIN(3, 0);
10712 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10713 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
10714 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10715
10716 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10717 IEM_MC_REF_EFLAGS(pEFlags);
10718 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10719 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10720
10721 IEM_MC_ADVANCE_RIP();
10722 IEM_MC_END();
10723 }
10724 else
10725 {
10726 /* memory target */
10727 uint32_t fAccess;
10728 if (pImpl->pfnLockedU32)
10729 fAccess = IEM_ACCESS_DATA_RW;
10730 else /* CMP, TEST */
10731 fAccess = IEM_ACCESS_DATA_R;
10732 IEM_MC_BEGIN(3, 2);
10733 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10734 IEM_MC_ARG(uint32_t, u32Src, 1);
10735 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10736 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10737
10738 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
10739 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10740 IEM_MC_ASSIGN(u32Src, u32Imm);
10741 if (pImpl->pfnLockedU32)
10742 IEMOP_HLP_DONE_DECODING();
10743 else
10744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10745 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10746 IEM_MC_FETCH_EFLAGS(EFlags);
10747 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10748 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10749 else
10750 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
10751
10752 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
10753 IEM_MC_COMMIT_EFLAGS(EFlags);
10754 IEM_MC_ADVANCE_RIP();
10755 IEM_MC_END();
10756 }
10757 break;
10758 }
10759
10760 case IEMMODE_64BIT:
10761 {
10762 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10763 {
10764 /* register target */
10765 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10767 IEM_MC_BEGIN(3, 0);
10768 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10769 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
10770 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10771
10772 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10773 IEM_MC_REF_EFLAGS(pEFlags);
10774 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10775
10776 IEM_MC_ADVANCE_RIP();
10777 IEM_MC_END();
10778 }
10779 else
10780 {
10781 /* memory target */
10782 uint32_t fAccess;
10783 if (pImpl->pfnLockedU64)
10784 fAccess = IEM_ACCESS_DATA_RW;
10785 else /* CMP */
10786 fAccess = IEM_ACCESS_DATA_R;
10787 IEM_MC_BEGIN(3, 2);
10788 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10789 IEM_MC_ARG(uint64_t, u64Src, 1);
10790 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10791 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10792
10793 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
10794 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10795 if (pImpl->pfnLockedU64)
10796 IEMOP_HLP_DONE_DECODING();
10797 else
10798 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10799 IEM_MC_ASSIGN(u64Src, u64Imm);
10800 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10801 IEM_MC_FETCH_EFLAGS(EFlags);
10802 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10803 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10804 else
10805 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
10806
10807 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
10808 IEM_MC_COMMIT_EFLAGS(EFlags);
10809 IEM_MC_ADVANCE_RIP();
10810 IEM_MC_END();
10811 }
10812 break;
10813 }
10814 }
10815 return VINF_SUCCESS;
10816}
10817
10818
/**
 * Opcode 0x82 - alias of Group 1 Eb,Ib (opcode 0x80).
 *
 * Invalid in 64-bit mode; otherwise forwarded to iemOp_Grp1_Eb_Ib_80.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
10825
10826
/**
 * Opcode 0x83 - Group 1: add/or/adc/sbb/and/sub/xor/cmp Ev,Ib.
 *
 * The 8-bit immediate is sign-extended to the effective operand size (see the
 * (int8_t) casts below).  The ModR/M reg field selects the operation; the
 * worker is looked up in g_apIemImplGrp1.  CMP has no locked worker, which is
 * what the pfnLockedU?? checks key off for read-only memory access.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The switch only records the mnemonic for stats/logging purposes. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
    }
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
             to the 386 even if absent in the intel reference manuals and some
             3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes zero bits 63:32 of the full register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        /* Only ops with a locked worker may legally carry a LOCK prefix;
           the U16 entry is representative for all sizes here. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Effective address first: the displacement precedes the
                   1-byte immediate in the opcode stream. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
11009
11010
/**
 * Opcode 0x84 - test Eb,Gb.
 *
 * Shares the generic byte r/m,reg binary-operator decoder with ADD/AND/OR
 * and friends; only the worker table differs.  AF is undefined after TEST.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
11018
11019
/**
 * Opcode 0x85 - test Ev,Gv.
 *
 * Shares the generic word/dword/qword r/m,reg binary-operator decoder;
 * only the worker table differs.  AF is undefined after TEST.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
11027
11028
/**
 * Opcode 0x86 - xchg Eb,Gb.
 *
 * Register form swaps the two general registers directly; memory form maps
 * the memory byte read/write and calls the xchg assembly worker.
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        /* Fetch both, then store crosswise to perform the swap. */
        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* NOTE(review): unlike the register path, no IEMOP_HLP_DONE_DECODING*
           call is made here before the memory access -- verify intentional. */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11076
11077
/**
 * Opcode 0x87 - xchg Ev,Gv.
 *
 * Register form swaps the two general registers via temporaries; memory form
 * maps the memory operand read/write and calls the size-specific xchg worker.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                /* Fetch both, then store crosswise to perform the swap. */
                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* NOTE(review): unlike the register path, no IEMOP_HLP_DONE_DECODING*
           call is made here before the memory access -- verify intentional. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* 32-bit register writes zero bits 63:32 of the full register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11199
11200
/**
 * Opcode 0x88 - mov Eb,Gb.
 *
 * Byte move from a general register to a register or memory destination.
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
11240
11241
/**
 * Opcode 0x89 - mov Ev,Gv.
 *
 * Word/dword/qword move from a general register to a register or memory
 * destination, dispatched on the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
11331
11332
/**
 * Opcode 0x8a - mov Gb,Eb.
 *
 * Byte move from a register or memory source into a general register.
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11370
11371
/**
 * Opcode 0x8b - mov Gv,Ev.
 *
 * Word/dword/qword move from a register or memory source into a general
 * register, dispatched on the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
11461
11462
/**
 * Opcode 0x63 - arpl Ew,Gw (legacy modes) / movsxd Gv,Ev (64-bit mode).
 *
 * Outside 64-bit mode this is ARPL.  In 64-bit mode it is MOVSXD; with a
 * non-64-bit effective operand size it degenerates to a plain MOV Gv,Ev.
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}
11472
11473
/**
 * Opcode 0x8c - MOV Ev,Sw (store segment register to r/m).
 *
 * Register destination: operand size is respected and upper bits are zero
 * extended.  Memory destination: always a 16-bit store regardless of the
 * operand-size prefix.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                /* _ZX_ variants zero extend the 16-bit selector value. */
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t,  u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* Effective address must be calculated before decoding completes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11547
11548
11549
11550
/**
 * Opcode 0x8d - LEA Gv,M.
 *
 * Stores the effective address of the memory operand into the destination
 * register; no memory access is performed.  The result is truncated to the
 * effective operand size (16/32-bit casts below).  A register-form ModR/M
 * encoding is invalid and raises \#UD.
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Truncate the address to the 16-bit operand size. */
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Truncate the address to the 32-bit operand size. */
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* 64-bit: store the address as-is, no cast needed. */
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}
11597
11598
/**
 * Opcode 0x8e - MOV Sw,Ev (load segment register from r/m).
 *
 * The access is always 16-bit regardless of operand-size prefixes.  Loading
 * CS, or an out-of-range segment register encoding, raises \#UD.  The actual
 * load (descriptor fetch, checks, faults) is deferred to the
 * iemCImpl_load_SReg C implementation.
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11653
11654
/**
 * Opcode 0x8f /0 - POP Ev (memory form mostly interpreter-only).
 *
 * Register form is delegated to the common pop helper.  The memory form is
 * tricky because Intel documents RSP as being incremented before the
 * effective address calculation, so a modified-rSP address calculation is
 * done first, and only on overall success are rSP and RIP committed.
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    PCPUMCTX        pCtx = IEM_GET_CTX(pVCpu);
    RTGCPTR         GCPtrEff;
    VBOXSTRICTRC    rcStrict;
    /* The last argument is the rSP bias (operand size in bytes) applied
       during the address calculation to model the pre-incremented RSP. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    /* Pop via a temporary rSP copy so nothing is committed on failure. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    /* Commit rSP and advance RIP only if both the pop and the store worked. */
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
11749
11750
/**
 * Opcode 0x8f - Group 1A.
 *
 * Only /0 (POP Ev) is defined; AMD reuses /1 thru /7 as the XOP prefix
 * (similar to the three-byte VEX prefix), which is not decoded yet and
 * currently raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp1A)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
    /** @todo XOP decoding. */
    IEMOP_MNEMONIC(xop_amd, "3-byte-xop");
    return IEMOP_RAISE_INVALID_OPCODE();
}
11763
11764
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Used by opcodes 0x90..0x97.  Merges the REX.B bit into @a iReg and swaps
 * the selected general register with rAX at the current effective operand
 * size, using two temporaries.  No flags are touched and no LOCK prefix is
 * permitted.
 *
 * @param   iReg    The low three bits of the register to exchange with rAX.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* REX.B extends the register selection to R8..R15. */
    iReg |= pVCpu->iem.s.uRexB;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg,         u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg,         u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg,         u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11814
11815
11816/** Opcode 0x90. */
11817FNIEMOP_DEF(iemOp_nop)
11818{
11819 /* R8/R8D and RAX/EAX can be exchanged. */
11820 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
11821 {
11822 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
11823 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
11824 }
11825
11826 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
11827 IEMOP_MNEMONIC(pause, "pause");
11828 else
11829 IEMOP_MNEMONIC(nop, "nop");
11830 IEM_MC_BEGIN(0, 0);
11831 IEM_MC_ADVANCE_RIP();
11832 IEM_MC_END();
11833 return VINF_SUCCESS;
11834}
11835
11836
/** Opcode 0x91 - xchg rCX,rAX (REX.B handled by the common helper). */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
11843
11844
/** Opcode 0x92 - xchg rDX,rAX (REX.B handled by the common helper). */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
11851
11852
/** Opcode 0x93 - xchg rBX,rAX (REX.B handled by the common helper). */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
11859
11860
11861/** Opcode 0x94. */
11862FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
11863{
11864 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
11865 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
11866}
11867
11868
/** Opcode 0x95 - xchg rBP,rAX (REX.B handled by the common helper). */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
11875
11876
/** Opcode 0x96 - xchg rSI,rAX (REX.B handled by the common helper). */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
11883
11884
/** Opcode 0x97 - xchg rDI,rAX (REX.B handled by the common helper). */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
11891
11892
/**
 * Opcode 0x98 - CBW / CWDE / CDQE depending on operand size.
 *
 * Sign extends AL->AX, AX->EAX or EAX->RAX.  Implemented by testing the sign
 * bit of the source and either OR-ing in or AND-ing out the upper bits of
 * rAX.  Flags are not affected.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1);
            /* Bit 7 of AL is the sign bit of the byte source. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1);
            /* Bit 15 of AX is the sign bit of the word source. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1);
            /* Bit 31 of EAX is the sign bit of the dword source. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11938
11939
/**
 * Opcode 0x99 - CWD / CDQ / CQO depending on operand size.
 *
 * Sign extends rAX into rDX:rAX by filling rDX with all ones or all zeros
 * according to the sign bit of rAX.  Flags are not affected.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1);
            /* DX = sign of AX replicated. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1);
            /* EDX = sign of EAX replicated. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1);
            /* RDX = sign of RAX replicated. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11985
11986
/**
 * Opcode 0x9a - CALL Ap (direct far call with immediate seg:off pointer).
 *
 * Invalid in 64-bit mode.  Decodes a 16- or 32-bit offset (by operand size)
 * followed by a 16-bit selector, then defers the actual far call to the
 * iemCImpl_callf C implementation.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
12003
12004
/**
 * Opcode 0x9b - WAIT (aka FWAIT).
 *
 * Checks for pending FPU exceptions and device-not-available conditions
 * (may raise \#NM or \#MF); otherwise just advances RIP.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12018
12019
/**
 * Opcode 0x9c - PUSHF/PUSHFD/PUSHFQ.
 *
 * Defaults to 64-bit operand size in long mode; the actual push (including
 * privilege/VM86 checks) is deferred to the iemCImpl_pushf C implementation.
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
12027
12028
/**
 * Opcode 0x9d - POPF/POPFD/POPFQ.
 *
 * Defaults to 64-bit operand size in long mode; the actual pop (including
 * IOPL/privilege handling) is deferred to the iemCImpl_popf C implementation.
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
12036
12037
/**
 * Opcode 0x9e - SAHF.
 *
 * Loads SF, ZF, AF, PF and CF from AH into EFLAGS, keeping all other flag
 * bits and forcing reserved bit 1 set.  In 64-bit mode the instruction is
 * only valid when the CPU reports the LAHF/SAHF feature.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    /* X86_GREG_xSP with the 8-bit accessors addresses AH (legacy high byte). */
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Keep only the flags SAHF is allowed to write... */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    /* ...clear the low byte of the current flags... */
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    /* ...and merge, forcing the reserved always-one bit 1. */
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12060
12061
/**
 * Opcode 0x9f - LAHF.
 *
 * Loads the low byte of EFLAGS into AH.  In 64-bit mode the instruction is
 * only valid when the CPU reports the LAHF/SAHF feature.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    /* X86_GREG_xSP with the 8-bit store addresses AH (legacy high byte). */
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12078
12079
/**
 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
 * prefixes.  Will return on failures.
 *
 * The immediate offset width follows the effective ADDRESS size (16/32/64),
 * not the operand size; 16- and 32-bit offsets are zero extended to 64-bit.
 * Ends with the done-decoding/no-lock check, so no further opcode bytes may
 * be fetched after this macro.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    } while (0)
12104
12105/** Opcode 0xa0. */
12106FNIEMOP_DEF(iemOp_mov_Al_Ob)
12107{
12108 /*
12109 * Get the offset and fend of lock prefixes.
12110 */
12111 RTGCPTR GCPtrMemOff;
12112 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12113
12114 /*
12115 * Fetch AL.
12116 */
12117 IEM_MC_BEGIN(0,1);
12118 IEM_MC_LOCAL(uint8_t, u8Tmp);
12119 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12120 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
12121 IEM_MC_ADVANCE_RIP();
12122 IEM_MC_END();
12123 return VINF_SUCCESS;
12124}
12125
12126
/**
 * Opcode 0xa1 - MOV rAX,Ov (load rAX from an absolute moffs address).
 *
 * The moffs immediate width follows the address size (see
 * IEMOP_FETCH_MOFFS_XX); the load width follows the operand size.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12172
12173
12174/** Opcode 0xa2. */
12175FNIEMOP_DEF(iemOp_mov_Ob_AL)
12176{
12177 /*
12178 * Get the offset and fend of lock prefixes.
12179 */
12180 RTGCPTR GCPtrMemOff;
12181 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12182
12183 /*
12184 * Store AL.
12185 */
12186 IEM_MC_BEGIN(0,1);
12187 IEM_MC_LOCAL(uint8_t, u8Tmp);
12188 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
12189 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
12190 IEM_MC_ADVANCE_RIP();
12191 IEM_MC_END();
12192 return VINF_SUCCESS;
12193}
12194
12195
12196/** Opcode 0xa3. */
12197FNIEMOP_DEF(iemOp_mov_Ov_rAX)
12198{
12199 /*
12200 * Get the offset and fend of lock prefixes.
12201 */
12202 RTGCPTR GCPtrMemOff;
12203 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12204
12205 /*
12206 * Store rAX.
12207 */
12208 switch (pVCpu->iem.s.enmEffOpSize)
12209 {
12210 case IEMMODE_16BIT:
12211 IEM_MC_BEGIN(0,1);
12212 IEM_MC_LOCAL(uint16_t, u16Tmp);
12213 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
12214 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
12215 IEM_MC_ADVANCE_RIP();
12216 IEM_MC_END();
12217 return VINF_SUCCESS;
12218
12219 case IEMMODE_32BIT:
12220 IEM_MC_BEGIN(0,1);
12221 IEM_MC_LOCAL(uint32_t, u32Tmp);
12222 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
12223 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
12224 IEM_MC_ADVANCE_RIP();
12225 IEM_MC_END();
12226 return VINF_SUCCESS;
12227
12228 case IEMMODE_64BIT:
12229 IEM_MC_BEGIN(0,1);
12230 IEM_MC_LOCAL(uint64_t, u64Tmp);
12231 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
12232 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
12233 IEM_MC_ADVANCE_RIP();
12234 IEM_MC_END();
12235 return VINF_SUCCESS;
12236
12237 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12238 }
12239}
12240
/**
 * Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one MOVS variant: loads a ValBits-wide value from [seg:rSI], stores
 * it at [ES:rDI], then advances (DF clear) or retreats (DF set) both rSI and
 * rDI by the element size.  Addresses are taken at AddrBits width and zero
 * extended to 64-bit for the memory accesses.
 */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
12259
/**
 * Opcode 0xa4 - MOVSB.
 *
 * With a REP/REPNE prefix the whole string operation is deferred to the
 * address-size-specific C implementation; otherwise a single byte move is
 * emitted via IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12293
12294
/**
 * Opcode 0xa5 - MOVSW/MOVSD/MOVSQ.
 *
 * With a REP/REPNE prefix the whole string operation is deferred to the
 * (operand size x address size) specific C implementation; otherwise a
 * single element move is emitted via IEM_MOVS_CASE.  A 64-bit operand with
 * a 16-bit address size cannot be encoded and asserts.
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                /* No break needed below: every path in this inner switch returns. */
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12377
12378#undef IEM_MOVS_CASE
12379
/**
 * Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one CMPS variant: loads ValBits-wide values from [seg:rSI] and
 * [ES:rDI], compares them via the iemAImpl_cmp assembly worker (updating
 * EFLAGS only; the destination local is not written back), then advances or
 * retreats rSI and rDI by the element size according to DF.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
/**
 * Opcode 0xa6 - CMPSB.
 *
 * REPE (F3) and REPNE (F2) prefixed forms are deferred to the corresponding
 * address-size-specific C implementations; otherwise a single byte compare
 * is emitted via IEM_CMPS_CASE.
 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
12452
12453
/** Opcode 0xa7 - cmpsw/cmpsd/cmpsq: compare string element at seg:[rSI] with ES:[rDI]. */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* no ModR/M or immediate; rejects LOCK */

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * Dispatch on effective operand size (element width), then on effective
     * address size (rSI/rDI width).
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, every case in the switch above returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4); /* 16-bit addressing not encodable in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, every case in the switch above returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2); /* 16-bit addressing not encodable in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
12572
12573#undef IEM_CMPS_CASE
12574
/** Opcode 0xa8 - test al,Ib: AND AL with an immediate byte, set flags, discard result. */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after TEST */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
12582
12583
/** Opcode 0xa9 - test rAX,Iz: AND rAX with an immediate (operand-size wide), set flags, discard result. */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after TEST */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
12591
12592
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 * Emits one MC block that stores the low ValBits of rAX at ES:[rDI] and then
 * steps rDI by ValBits/8 - backwards when EFLAGS.DF is set, forwards otherwise.
 * @param ValBits   Element width in bits (8, 16, 32 or 64).
 * @param AddrBits  Effective address width for rDI (16, 32 or 64). */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \
12608
/** Opcode 0xaa - stosb: store AL at ES:[rDI]. */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* no ModR/M or immediate; rejects LOCK */

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (REPZ and REPNZ behave identically for STOS - no flag test is involved.)
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12642
12643
/** Opcode 0xab - stosw/stosd/stosq: store rAX (operand-size wide) at ES:[rDI]. */
FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* no ModR/M or immediate; rejects LOCK */

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (REPZ and REPNZ behave identically for STOS - no flag test is involved.)
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, every case in the switch above returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9); /* 16-bit addressing not encodable in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with stosb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12726
12727#undef IEM_STOS_CASE
12728
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 * Emits one MC block that loads a ValBits-wide element from seg:[rSI] into
 * the low ValBits of rAX, then steps rSI by ValBits/8 - backwards when
 * EFLAGS.DF is set, forwards otherwise.
 * @param ValBits   Element width in bits (8, 16, 32 or 64).
 * @param AddrBits  Effective address width for rSI (16, 32 or 64). */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
12744
/** Opcode 0xac - lodsb: load AL from seg:[rSI]. */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* no ModR/M or immediate; rejects LOCK */

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (REPZ and REPNZ behave identically for LODS - no flag test is involved.)
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12778
12779
/** Opcode 0xad - lodsw/lodsd/lodsq: load rAX (operand-size wide) from seg:[rSI]. */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* no ModR/M or immediate; rejects LOCK */

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (REPZ and REPNZ behave identically for LODS - no flag test is involved.)
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, every case in the switch above returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7); /* 16-bit addressing not encodable in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12862
12863#undef IEM_LODS_CASE
12864
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 * Emits one MC block that compares the low ValBits of rAX against the element
 * at ES:[rDI] (via the CMP assembly worker, updating EFLAGS only), then steps
 * rDI by ValBits/8 - backwards when EFLAGS.DF is set, forwards otherwise.
 * @param ValBits   Element width in bits (8, 16, 32 or 64).
 * @param AddrBits  Effective address width for rDI (16, 32 or 64). */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
12886
12887/** Opcode 0xae. */
12888FNIEMOP_DEF(iemOp_scasb_AL_Xb)
12889{
12890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12891
12892 /*
12893 * Use the C implementation if a repeat prefix is encountered.
12894 */
12895 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12896 {
12897 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
12898 switch (pVCpu->iem.s.enmEffAddrMode)
12899 {
12900 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
12901 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
12902 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
12903 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12904 }
12905 }
12906 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12907 {
12908 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
12909 switch (pVCpu->iem.s.enmEffAddrMode)
12910 {
12911 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
12912 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
12913 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
12914 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12915 }
12916 }
12917 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
12918
12919 /*
12920 * Sharing case implementation with stos[wdq] below.
12921 */
12922 switch (pVCpu->iem.s.enmEffAddrMode)
12923 {
12924 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
12925 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
12926 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
12927 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12928 }
12929 return VINF_SUCCESS;
12930}
12931
12932
/** Opcode 0xaf - scasw/scasd/scasq: compare rAX (operand-size wide) with ES:[rDI]. */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* no ModR/M or immediate; rejects LOCK */

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, every case in the switch above returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Is this wrong? We can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, every case in the switch above returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5); /* 16-bit addressing not encodable in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
13048
13049#undef IEM_SCAS_CASE
13050
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the immediate byte and stores it into the given 8-bit register.
 *
 * @param   iReg    The 8-bit general register index (incl. any REX.B
 *                  extension supplied by the caller).
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* rejects LOCK */

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iReg, u8Value);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13067
13068
/** Opcode 0xb0 - mov AL,Ib (REX.B selects R8L). */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
13075
13076
/** Opcode 0xb1 - mov CL,Ib (REX.B selects R9L). */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
13083
13084
/** Opcode 0xb2 - mov DL,Ib (REX.B selects R10L). */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
13091
13092
/** Opcode 0xb3 - mov BL,Ib (REX.B selects R11L). */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
13099
13100
/** Opcode 0xb4 - mov AH,Ib.
 * Note: xSP is intentional - 8-bit register index 4 encodes AH without a REX
 * prefix and SPL/R12L with one; presumably the U8 GREG accessors resolve the
 * high-byte aliasing (matches the other AH/CH/DH/BH handlers here). */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
13107
13108
/** Opcode 0xb5 - mov CH,Ib.
 * Note: xBP is intentional - 8-bit register index 5 encodes CH without a REX
 * prefix and BPL/R13L with one (see iemOp_mov_AH_Ib). */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
13115
13116
/** Opcode 0xb6 - mov DH,Ib.
 * Note: xSI is intentional - 8-bit register index 6 encodes DH without a REX
 * prefix and SIL/R14L with one (see iemOp_mov_AH_Ib). */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
13123
13124
/** Opcode 0xb7 - mov BH,Ib.
 * Note: xDI is intentional - 8-bit register index 7 encodes BH without a REX
 * prefix and DIL/R15L with one (see iemOp_mov_AH_Ib). */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
13131
13132
13133/**
13134 * Common 'mov regX,immX' helper.
13135 */
13136FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
13137{
13138 switch (pVCpu->iem.s.enmEffOpSize)
13139 {
13140 case IEMMODE_16BIT:
13141 {
13142 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13144
13145 IEM_MC_BEGIN(0, 1);
13146 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
13147 IEM_MC_STORE_GREG_U16(iReg, u16Value);
13148 IEM_MC_ADVANCE_RIP();
13149 IEM_MC_END();
13150 break;
13151 }
13152
13153 case IEMMODE_32BIT:
13154 {
13155 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13157
13158 IEM_MC_BEGIN(0, 1);
13159 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
13160 IEM_MC_STORE_GREG_U32(iReg, u32Value);
13161 IEM_MC_ADVANCE_RIP();
13162 IEM_MC_END();
13163 break;
13164 }
13165 case IEMMODE_64BIT:
13166 {
13167 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
13168 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13169
13170 IEM_MC_BEGIN(0, 1);
13171 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
13172 IEM_MC_STORE_GREG_U64(iReg, u64Value);
13173 IEM_MC_ADVANCE_RIP();
13174 IEM_MC_END();
13175 break;
13176 }
13177 }
13178
13179 return VINF_SUCCESS;
13180}
13181
13182
13183/** Opcode 0xb8. */
13184FNIEMOP_DEF(iemOp_eAX_Iv)
13185{
13186 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
13187 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
13188}
13189
13190
13191/** Opcode 0xb9. */
13192FNIEMOP_DEF(iemOp_eCX_Iv)
13193{
13194 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
13195 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
13196}
13197
13198
13199/** Opcode 0xba. */
13200FNIEMOP_DEF(iemOp_eDX_Iv)
13201{
13202 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
13203 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
13204}
13205
13206
13207/** Opcode 0xbb. */
13208FNIEMOP_DEF(iemOp_eBX_Iv)
13209{
13210 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
13211 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
13212}
13213
13214
13215/** Opcode 0xbc. */
13216FNIEMOP_DEF(iemOp_eSP_Iv)
13217{
13218 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
13219 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
13220}
13221
13222
13223/** Opcode 0xbd. */
13224FNIEMOP_DEF(iemOp_eBP_Iv)
13225{
13226 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
13227 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
13228}
13229
13230
13231/** Opcode 0xbe. */
13232FNIEMOP_DEF(iemOp_eSI_Iv)
13233{
13234 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
13235 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
13236}
13237
13238
13239/** Opcode 0xbf. */
13240FNIEMOP_DEF(iemOp_eDI_Iv)
13241{
13242 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
13243 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
13244}
13245
13246
/** Opcode 0xc0 - Group 2 rotate/shift Eb,Ib; /6 is undefined. */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186(); /* 0xc0/0xc1 (imm8 shift count) first appeared on the 80186 */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* The reg field of the ModR/M byte selects the operation. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF); /* undefined for some counts/ops */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* the 1 presumably accounts for the trailing Ib byte */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13306
13307
/**
 * @opcode 0xc1
 * Group 2: rotate/shift Ev by an immediate byte count
 * (rol/ror/rcl/rcr/shl/shr/sar Ev,Ib).
 *
 * The ModR/M reg field selects the operation; /6 is unassigned and raises
 * \#UD.  Requires a 80186 or later (the Ib shift-count encoding did not
 * exist on the 8086/8088).
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is not assigned in group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* The hardware leaves OF and AF undefined for these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero bits 63:32 in 64-bit mode. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        /* Note: the Ib operand follows the displacement, so the effective
           address (with cbImm=1 for RIP-relative fixup) is decoded first. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13447
13448
/**
 * @opcode 0xc2
 * Near return popping Iw extra bytes of arguments off the stack (retn Iw).
 * In 64-bit mode the operand size defaults to 64-bit.
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Stack/branch work is non-trivial, so defer to the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
13458
13459
/**
 * @opcode 0xc3
 * Plain near return (retn); same as 0xc2 but pops no extra argument bytes.
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
}
13468
13469
/**
 * @opcode 0xc4
 * Either LES Gv,Mp or the first byte of a 2-byte VEX prefix, depending on
 * mode and the ModR/M MOD field (see comment below).
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(vex2_prefix, "2-byte-vex");
        /* The LES instruction is invalid 64-bit mode. In legacy and
           compatability mode it is invalid with MOD=3.
           The use as a VEX prefix is made possible by assigning the inverted
           REX.R to the top MOD bit, and the top bit in the inverted register
           specifier to the bottom MOD bit, thereby effectively limiting 32-bit
           to accessing registers 0..7 in this VEX form. */
        /** @todo VEX: Just use new tables for it. */
        /* VEX decoding is not implemented yet, so raise \#UD for now. */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
13490
13491
/**
 * @opcode 0xc5
 * Either LDS Gv,Mp or the first byte of a 3-byte VEX prefix, depending on
 * mode and the ModR/M MOD field (see comment below).
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
{
    /* The LDS instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
            return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
        }
        IEMOP_HLP_NO_REAL_OR_V86_MODE();
    }

    IEMOP_MNEMONIC(vex3_prefix, "3-byte-vex");
    /** @todo Test when exctly the VEX conformance checks kick in during
     *        instruction decoding and fetching (using \#PF). */
    /* Consume the two VEX payload bytes and the real opcode byte so the
       instruction length is accounted for even though we \#UD below. */
    uint8_t bVex1;   IEM_OPCODE_GET_NEXT_U8(&bVex1);
    uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
#if 0 /* will make sense of this next week... */
    /* NOTE(review): IEM_OP_PRF_REPZ appears twice in this (dead, #if 0) mask;
       one of them is presumably redundant or a typo -- confirm when enabling. */
    if (   !(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
        &&
        )
    {

    }
#endif

    /** @todo VEX: Just use new tables for it. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
13529
13530
/**
 * @opcode 0xc6
 * Group 11: mov Eb,Ib is the only assigned encoding (/0); all other reg
 * values raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        /* The Ib operand follows the displacement, hence cbImm=1 for the
           effective address calculation and the fetch ordering below. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13563
13564
/**
 * @opcode 0xc7
 * Group 11: mov Ev,Iz is the only assigned encoding (/0); all other reg
 * values raise \#UD.  In 64-bit mode Iz is a 32-bit immediate that is
 * sign-extended to 64 bits.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* Iz is 32 bits sign-extended to 64 in 64-bit operand size. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        /* The immediate follows the displacement, so cbImm (2 or 4) is
           passed to the effective address calculation for RIP-relative
           addressing, and the immediate is fetched afterwards. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13650
13651
13652
13653
/**
 * @opcode 0xc8
 * ENTER Iw,Ib - create a stack frame of Iw bytes with nesting level Ib
 * (186+).  Defers to the C implementation for the frame building.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
13665
13666
/**
 * @opcode 0xc9
 * LEAVE - tear down the stack frame set up by ENTER (186+).
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
13676
13677
/**
 * @opcode 0xca
 * Far return popping Iw extra bytes of arguments off the stack (retf Iw).
 * Privilege/mode checks are handled by the C implementation.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
13687
13688
/**
 * @opcode 0xcb
 * Plain far return (retf); same as 0xca but pops no extra argument bytes.
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
13697
13698
/**
 * @opcode 0xcc
 * INT3 breakpoint - raises \#BP, flagged as the dedicated one-byte
 * breakpoint instruction (fIsBpInstr=true affects e.g. VME/ICEBP handling
 * in the C implementation).
 */
FNIEMOP_DEF(iemOp_int_3)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
13705
13706
/**
 * @opcode 0xcd
 * INT Ib - software interrupt through vector Ib.  Even INT 3 encoded this
 * way is not treated as the breakpoint instruction (fIsBpInstr=false).
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
13714
13715
/**
 * @opcode 0xce
 * INTO - raise \#OF via the interrupt path if EFLAGS.OF is set; invalid in
 * 64-bit mode.  The conditional-on-OF part is handled inside iemCImpl_int.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();
    /* NOTE(review): unlike the sibling INT opcodes there is no
       IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX() here -- confirm whether a
       LOCK-prefixed INTO should \#UD. */

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool, fIsBpInstr, /*=*/ false, 1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13729
13730
/**
 * @opcode 0xcf
 * IRET - interrupt return; all the mode-dependent heavy lifting lives in
 * the C implementation.
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
13738
13739
/**
 * @opcode 0xd0
 * Group 2: rotate/shift Eb by a constant count of 1
 * (rol/ror/rcl/rcr/shl/shr/sar Eb,1).  /6 is unassigned and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is not assigned in group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* The hardware leaves OF and AF undefined for these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1); /* count hardwired to 1 for this opcode */
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13795
13796
13797
/**
 * @opcode 0xd1
 * Group 2: rotate/shift Ev by a constant count of 1
 * (rol/ror/rcl/rcr/shl/shr/sar Ev,1).  /6 is unassigned and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is not assigned in group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* The hardware leaves OF and AF undefined for these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); /* count hardwired to 1 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero bits 63:32 in 64-bit mode. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13929
13930
/**
 * @opcode 0xd2
 * Group 2: rotate/shift Eb by the count in CL
 * (rol/ror/rcl/rcr/shl/shr/sar Eb,CL).  /6 is unassigned and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is not assigned in group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* The hardware leaves OF and AF undefined for these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13988
13989
/**
 * @opcode 0xd3
 * Group 2: rotate/shift Ev by the count in CL
 * (rol/ror/rcl/rcr/shl/shr/sar Ev,CL).  /6 is unassigned and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is not assigned in group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* The hardware leaves OF and AF undefined for these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero bits 63:32 in 64-bit mode. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14127
/**
 * @opcode 0xd4
 * AAM Ib - ASCII adjust AX after multiply; Ib is the divisor (0x0a in the
 * canonical encoding).  Invalid in 64-bit mode; a zero divisor raises \#DE
 * here at decode time rather than in the C worker.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR(); /* aam 0 divides by zero -> \#DE */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
14139
14140
/**
 * @opcode 0xd5
 * AAD Ib - ASCII adjust AX before division; Ib is the base (0x0a in the
 * canonical encoding).  Invalid in 64-bit mode.  No \#DE check needed, as
 * the operation multiplies rather than divides.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
14150
14151
14152/** Opcode 0xd6. */
14153FNIEMOP_DEF(iemOp_salc)
14154{
14155 IEMOP_MNEMONIC(salc, "salc");
14156 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
14157 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
14158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14159 IEMOP_HLP_NO_64BIT();
14160
14161 IEM_MC_BEGIN(0, 0);
14162 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
14163 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
14164 } IEM_MC_ELSE() {
14165 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
14166 } IEM_MC_ENDIF();
14167 IEM_MC_ADVANCE_RIP();
14168 IEM_MC_END();
14169 return VINF_SUCCESS;
14170}
14171
14172
/**
 * @opcode 0xd7
 * XLAT - AL = [rDX:rBX + AL] (table lookup), where the base register width
 * follows the effective address size.  The 16/32-bit variants use the
 * FETCH_MEM16/32 forms so the address arithmetic wraps at the address size.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX); /* AL zero-extended to the address width */
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14219
14220
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * STn is selected by the R/M field of @a bRm.  If either register is empty,
 * stack underflow is raised/recorded against ST0 instead of calling the
 * assembly worker.
 *
 * @param bRm The ModR/M byte; only the R/M field is used here.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result goes into ST0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0); /* underflow response targets ST0 */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14251
14252
14253/**
14254 * Common worker for FPU instructions working on ST0 and STn, and only affecting
14255 * flags.
14256 *
14257 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14258 */
14259FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14260{
14261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14262
14263 IEM_MC_BEGIN(3, 1);
14264 IEM_MC_LOCAL(uint16_t, u16Fsw);
14265 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14266 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14267 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14268
14269 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14270 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14271 IEM_MC_PREPARE_FPU_USAGE();
14272 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14273 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14274 IEM_MC_UPDATE_FSW(u16Fsw);
14275 IEM_MC_ELSE()
14276 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
14277 IEM_MC_ENDIF();
14278 IEM_MC_ADVANCE_RIP();
14279
14280 IEM_MC_END();
14281 return VINF_SUCCESS;
14282}
14283
14284
14285/**
14286 * Common worker for FPU instructions working on ST0 and STn, only affecting
14287 * flags, and popping when done.
14288 *
14289 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14290 */
14291FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14292{
14293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14294
14295 IEM_MC_BEGIN(3, 1);
14296 IEM_MC_LOCAL(uint16_t, u16Fsw);
14297 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14298 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14299 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14300
14301 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14302 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14303 IEM_MC_PREPARE_FPU_USAGE();
14304 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14305 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14306 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
14307 IEM_MC_ELSE()
14308 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
14309 IEM_MC_ENDIF();
14310 IEM_MC_ADVANCE_RIP();
14311
14312 IEM_MC_END();
14313 return VINF_SUCCESS;
14314}
14315
14316
/** Opcode 0xd8 11/0. FADD ST0,STn - add STn to ST0, result in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xd8 11/1. FMUL ST0,STn - multiply ST0 by STn, result in ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xd8 11/2. FCOM ST0,STn - compare, only updates FSW. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/3. FCOMP ST0,STn - compare then pop; same assembly worker as FCOM. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/4. FSUB ST0,STn - ST0 = ST0 - STn. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xd8 11/5. FSUBR ST0,STn - ST0 = STn - ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xd8 11/6. FDIV ST0,STn - ST0 = ST0 / STn. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}


/** Opcode 0xd8 11/7. FDIVR ST0,STn - ST0 = STn / ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
14379
14380
14381/**
14382 * Common worker for FPU instructions working on ST0 and an m32r, and storing
14383 * the result in ST0.
14384 *
14385 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14386 */
14387FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
14388{
14389 IEM_MC_BEGIN(3, 3);
14390 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14391 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14392 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
14393 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14394 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14395 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
14396
14397 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14399
14400 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14401 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14402 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14403
14404 IEM_MC_PREPARE_FPU_USAGE();
14405 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14406 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
14407 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14408 IEM_MC_ELSE()
14409 IEM_MC_FPU_STACK_UNDERFLOW(0);
14410 IEM_MC_ENDIF();
14411 IEM_MC_ADVANCE_RIP();
14412
14413 IEM_MC_END();
14414 return VINF_SUCCESS;
14415}
14416
14417
/** Opcode 0xd8 !11/0. FADD ST0,m32real. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}


/** Opcode 0xd8 !11/1. FMUL ST0,m32real. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
14432
14433
/** Opcode 0xd8 !11/2. FCOM ST0,m32real - compare ST0 with a 32-bit real from
 *  memory, only updating FSW (open-coded; no common worker matches the
 *  compare-with-memory-operand shape). */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* FSW update variants that also record FPUDP/FPUDS for the memory operand. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14466
14467
/** Opcode 0xd8 !11/3. FCOMP ST0,m32real - same as FCOM m32r but pops ST0
 *  afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14500
14501
/** Opcode 0xd8 !11/4. FSUB ST0,m32real - ST0 = ST0 - m32r. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5. FSUBR ST0,m32real - ST0 = m32r - ST0 (reversed). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6. FDIV ST0,m32real - ST0 = ST0 / m32r. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7. FDIVR ST0,m32real - ST0 = m32r / ST0 (reversed). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
14532
14533
/** Opcode 0xd8. First x87 escape byte: dispatches on the ModR/M mod and reg
 *  fields to the register (ST0,STn) or memory (ST0,m32real) forms. */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (FOP): ModR/M byte + low 3 bits of 0xd8. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form (mod == 3): operate on ST0 and STn. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: operand is a 32-bit real. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14571
14572
/** Opcode 0xd9 /0 mem32real
 * FLD m32real - convert a 32-bit real from memory to 80-bit and push it.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 relative to TOP is the slot the push will land in; it must
       be empty or the push overflows the stack. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14605
14606
/** Opcode 0xd9 !11/2 mem32real
 * FST m32real - store ST0 to memory as a 32-bit real (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; committed only if the conversion
       doesn't raise an unmasked exception. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with IM masked, store the indefinite QNaN; either way
           report stack underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14641
14642
/** Opcode 0xd9 !11/3
 * FSTP m32real - store ST0 to memory as a 32-bit real, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        /* Same as FST m32r but pops the stack on completion. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14677
14678
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte - load the FPU environment; heavy lifting is done by the
 * C implementation (iemCImpl_fldenv), which gets the effective operand size to
 * pick the 14- vs 28-byte layout. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
14696
14697
14698/** Opcode 0xd9 !11/5 */
14699FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
14700{
14701 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
14702 IEM_MC_BEGIN(1, 1);
14703 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14704 IEM_MC_ARG(uint16_t, u16Fsw, 0);
14705 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14707 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14708 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14709 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14710 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
14711 IEM_MC_END();
14712 return VINF_SUCCESS;
14713}
14714
14715
/** Opcode 0xd9 !11/6
 * FNSTENV m14/28byte - store the FPU environment (no-wait form; note the
 * mnemonic string below says "fstenv" - presumably intentional shorthand,
 * TODO confirm). Work is done by iemCImpl_fnstenv. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
14733
14734
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte - store the FPU control word to memory (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
14752
14753
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP - FPU no-operation; still checks for pending exceptions and updates
 * the FPU opcode/IP state. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     * intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
14771
14772
/** Opcode 0xd9 11/0 stN
 * FLD STn - push a copy of STn onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Source STn must hold a value; the copy is wrapped in an FPU result
       (FSW 0) so the common push path can be reused. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
14800
14801
/** Opcode 0xd9 11/3 stN
 * FXCH STn - exchange ST0 and STn. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Swap: STn's old value (with C1 set) goes to ST0, ST0's old value to STn. */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        /* Either register empty: underflow handling lives in a C helper. */
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
14832
14833
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP STn - copy ST0 to STn, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST0 itself: just pop (or report underflow if empty). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* General case: copy ST0's value into STn, then pop ST0. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
14880
14881
14882/**
14883 * Common worker for FPU instructions working on ST0 and replaces it with the
14884 * result, i.e. unary operators.
14885 *
14886 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14887 */
14888FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
14889{
14890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14891
14892 IEM_MC_BEGIN(2, 1);
14893 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14894 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14895 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14896
14897 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14898 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14899 IEM_MC_PREPARE_FPU_USAGE();
14900 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14901 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
14902 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14903 IEM_MC_ELSE()
14904 IEM_MC_FPU_STACK_UNDERFLOW(0);
14905 IEM_MC_ENDIF();
14906 IEM_MC_ADVANCE_RIP();
14907
14908 IEM_MC_END();
14909 return VINF_SUCCESS;
14910}
14911
14912
/** Opcode 0xd9 0xe0. FCHS - change the sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1. FABS - absolute value of ST0. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
14927
14928
14929/**
14930 * Common worker for FPU instructions working on ST0 and only returns FSW.
14931 *
14932 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14933 */
14934FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
14935{
14936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14937
14938 IEM_MC_BEGIN(2, 1);
14939 IEM_MC_LOCAL(uint16_t, u16Fsw);
14940 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14941 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14942
14943 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14944 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14945 IEM_MC_PREPARE_FPU_USAGE();
14946 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14947 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
14948 IEM_MC_UPDATE_FSW(u16Fsw);
14949 IEM_MC_ELSE()
14950 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
14951 IEM_MC_ENDIF();
14952 IEM_MC_ADVANCE_RIP();
14953
14954 IEM_MC_END();
14955 return VINF_SUCCESS;
14956}
14957
14958
/** Opcode 0xd9 0xe4. FTST - compare ST0 with 0.0, only updates FSW. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}


/** Opcode 0xd9 0xe5. FXAM - examine/classify ST0, only updates FSW. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
14973
14974
14975/**
14976 * Common worker for FPU instructions pushing a constant onto the FPU stack.
14977 *
14978 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14979 */
14980FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
14981{
14982 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14983
14984 IEM_MC_BEGIN(1, 1);
14985 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14986 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14987
14988 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14989 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14990 IEM_MC_PREPARE_FPU_USAGE();
14991 IEM_MC_IF_FPUREG_IS_EMPTY(7)
14992 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
14993 IEM_MC_PUSH_FPU_RESULT(FpuRes);
14994 IEM_MC_ELSE()
14995 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
14996 IEM_MC_ENDIF();
14997 IEM_MC_ADVANCE_RIP();
14998
14999 IEM_MC_END();
15000 return VINF_SUCCESS;
15001}
15002
15003
/** Opcode 0xd9 0xe8. FLD1 - push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9. FLDL2T - push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea. FLDL2E - push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb. FLDPI - push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec. FLDLG2 - push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed. FLDLN2 - push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee. FLDZ - push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
15056
15057
/** Opcode 0xd9 0xf0. F2XM1 - ST0 = 2^ST0 - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
15064
15065
15066/**
15067 * Common worker for FPU instructions working on STn and ST0, storing the result
15068 * in STn, and popping the stack unless IE, DE or ZE was raised.
15069 *
15070 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15071 */
15072FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
15073{
15074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15075
15076 IEM_MC_BEGIN(3, 1);
15077 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15078 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15079 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15080 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
15081
15082 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15083 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15084
15085 IEM_MC_PREPARE_FPU_USAGE();
15086 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
15087 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
15088 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
15089 IEM_MC_ELSE()
15090 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
15091 IEM_MC_ENDIF();
15092 IEM_MC_ADVANCE_RIP();
15093
15094 IEM_MC_END();
15095 return VINF_SUCCESS;
15096}
15097
15098
/** Opcode 0xd9 0xf1. FYL2X - ST1 = ST1 * log2(ST0), then pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
15105
15106
15107/**
15108 * Common worker for FPU instructions working on ST0 and having two outputs, one
15109 * replacing ST0 and one pushed onto the stack.
15110 *
15111 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15112 */
15113FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
15114{
15115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15116
15117 IEM_MC_BEGIN(2, 1);
15118 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
15119 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
15120 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
15121
15122 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15123 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15124 IEM_MC_PREPARE_FPU_USAGE();
15125 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15126 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
15127 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
15128 IEM_MC_ELSE()
15129 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
15130 IEM_MC_ENDIF();
15131 IEM_MC_ADVANCE_RIP();
15132
15133 IEM_MC_END();
15134 return VINF_SUCCESS;
15135}
15136
15137
/** Opcode 0xd9 0xf2. FPTAN - partial tangent of ST0, then push (two outputs). */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}


/** Opcode 0xd9 0xf3. FPATAN - partial arctangent, result in ST1, then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4. FXTRACT - split ST0 into exponent and significand (two outputs). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5. FPREM1 - IEEE partial remainder of ST0 / ST1, result in ST0. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
15168
15169
/** Opcode 0xd9 0xf6.
 * FDECSTP - decrement the FPU stack TOP pointer (no tags or data changed). */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears C0-C3 per the note above */

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15192
15193
/** Opcode 0xd9 0xf7.
 * FINCSTP - increment the FPU stack TOP pointer (no tags or data changed). */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears C0-C3 per the note above */

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15216
15217
/** Opcode 0xd9 0xf8. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    /* Dispatch to the common ST(0)-by-ST(n) worker (n=1, no pop) with the fprem assembly implementation. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
15224
15225
/** Opcode 0xd9 0xf9. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    /* Dispatch to the common ST(1)-by-ST(0)-then-pop worker with the fyl2xp1 assembly implementation. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
15232
15233
/** Opcode 0xd9 0xfa. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    /* Dispatch to the common ST(0)-only worker with the fsqrt assembly implementation. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
15240
15241
/** Opcode 0xd9 0xfb. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    /* Dispatch to the common replace-ST(0)-and-push worker with the fsincos assembly implementation. */
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
15248
15249
/** Opcode 0xd9 0xfc. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    /* Dispatch to the common ST(0)-only worker with the frndint assembly implementation. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
15256
15257
/** Opcode 0xd9 0xfd. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    /* Dispatch to the common ST(0)-by-ST(n) worker (n=1, no pop) with the fscale assembly implementation. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
15264
15265
/** Opcode 0xd9 0xfe. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    /* Dispatch to the common ST(0)-only worker with the fsin assembly implementation. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
15272
15273
/** Opcode 0xd9 0xff. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    /* Dispatch to the common ST(0)-only worker with the fcos assembly implementation. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
15280
15281
/**
 * Used by iemOp_EscF1 to dispatch the register-form encodings 0xe0 thru 0xff
 * of the 0xd9 escape opcode (one handler per ModR/M byte value, index is
 * bRm - 0xe0).
 */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
15318
15319
/** Opcode 0xd9. */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode value (ModR/M byte + low 3 bits of the escape byte). */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms, dispatched on the reg field of ModR/M. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* 0xe0..0xff are handled via the table, one entry per ModR/M value. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms, dispatched on the reg field of ModR/M. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15362
15363
/** Opcode 0xda 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty; copy ST(N) into ST(0) when CF is set. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15390
15391
/** Opcode 0xda 11/1. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty; copy ST(N) into ST(0) when ZF is set. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15418
15419
/** Opcode 0xda 11/2. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty; copy ST(N) into ST(0) when CF or ZF is set. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15446
15447
/** Opcode 0xda 11/3. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty; copy ST(N) into ST(0) when PF is set. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15474
15475
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping twice when done.
 *
 * The implementation only receives a FSW pointer plus the two register
 * values, so it can update flags but never store a result.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(0) and ST(1) must both be non-empty; otherwise raise stack underflow (still popping twice). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15507
15508
/** Opcode 0xda 0xe9. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
    /* Dispatch to the flags-only compare-and-pop-twice worker with the fucom assembly implementation. */
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
15515
15516
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte, used for effective address calculation.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the 32-bit integer operand before touching the FPU state. */
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(0) must be non-empty; on success the result is stored back into ST(0). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15552
15553
/** Opcode 0xda !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    /* Dispatch to the common ST(0)-by-m32i worker with the fiadd assembly implementation. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
15560
15561
/** Opcode 0xda !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    /* Dispatch to the common ST(0)-by-m32i worker with the fimul assembly implementation. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
15568
15569
/** Opcode 0xda !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Compare only updates FSW; no result is stored and nothing is popped. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15602
15603
/** Opcode 0xda !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Same as FICOM but pops ST(0) after updating FSW (underflow path pops too). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15636
15637
/** Opcode 0xda !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    /* Dispatch to the common ST(0)-by-m32i worker with the fisub assembly implementation. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
15644
15645
/** Opcode 0xda !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    /* Dispatch to the common ST(0)-by-m32i worker with the fisubr assembly implementation. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
15652
15653
/** Opcode 0xda !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    /* Dispatch to the common ST(0)-by-m32i worker with the fidiv assembly implementation. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
15660
15661
/** Opcode 0xda !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    /* Dispatch to the common ST(0)-by-m32i worker with the fidivr assembly implementation. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
15668
15669
/** Opcode 0xda. */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode value (ModR/M byte + low 3 bits of the escape byte). */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms: the FCMOVcc family; /5 only encodes FUCOMPP at 0xe9. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms: integer (m32i) arithmetic and compares. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15709
15710
/** Opcode 0xdb !11/0. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) must be free for the push; otherwise signal stack push overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15742
15743
/** Opcode 0xdb !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; the commit is tied to the resulting FSW. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): store the integer indefinite value if #IE is masked, then report underflow and pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15778
15779
/** Opcode 0xdb !11/2. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; the commit is tied to the resulting FSW. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): store the integer indefinite value if #IE is masked, then report underflow (no pop). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15814
15815
/** Opcode 0xdb !11/3. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; the commit is tied to the resulting FSW. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): store the integer indefinite value if #IE is masked, then report underflow and pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15850
15851
/** Opcode 0xdb !11/5. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the 80-bit operand before touching the FPU state. */
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) must be free for the push; otherwise signal stack push overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15883
15884
/** Opcode 0xdb !11/7. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; the commit is tied to the resulting FSW. */
    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): store negative QNaN if #IE is masked, then report underflow and pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15919
15920
/** Opcode 0xdb 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty; copy ST(N) into ST(0) when CF is clear. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15947
15948
/** Opcode 0xdb 11/1. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty; copy ST(N) into ST(0) when ZF is clear. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15975
15976
/** Opcode 0xdb 11/2. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty; copy ST(N) into ST(0) when both CF and ZF are clear. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16003
16004
16005/** Opcode 0xdb 11/3. */
16006FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
16007{
16008 IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
16009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16010
16011 IEM_MC_BEGIN(0, 1);
16012 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
16013
16014 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16015 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16016
16017 IEM_MC_PREPARE_FPU_USAGE();
16018 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
16019 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
16020 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
16021 IEM_MC_ENDIF();
16022 IEM_MC_UPDATE_FPU_OPCODE_IP();
16023 IEM_MC_ELSE()
16024 IEM_MC_FPU_STACK_UNDERFLOW(0);
16025 IEM_MC_ENDIF();
16026 IEM_MC_ADVANCE_RIP();
16027
16028 IEM_MC_END();
16029 return VINF_SUCCESS;
16030}
16031
16032
/** Opcode 0xdb 0xe0. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* 8087-only instruction; treated as a no-op here apart from the #NM check. */
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16044
16045
/** Opcode 0xdb 0xe1. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* 8087-only instruction; treated as a no-op here apart from the #NM check. */
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16057
16058
/** Opcode 0xdb 0xe2. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Clears the FSW exception bits (no exception check beyond #NM). */
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16073
16074
/** Opcode 0xdb 0xe3. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Defer to the C implementation; fCheckXcpts=false as FNINIT ignores pending exceptions. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
16082
16083
/** Opcode 0xdb 0xe4. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* 80287-only instruction; treated as a no-op here apart from the #NM check. */
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16095
16096
/** Opcode 0xdb 0xe5. */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    /* 80287XL-only no-op variant, disabled in favor of raising #UD below. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
16112
16113
/** Opcode 0xdb 11/5. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    /* Defer to the shared FCOMI/FUCOMI C implementation; fPop=false (no pop). */
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
16120
16121
/** Opcode 0xdb 11/6. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    /* Defer to the shared FCOMI/FUCOMI C implementation; fPop=false (no pop). */
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
16128
16129
/** Opcode 0xdb. */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode value (ModR/M byte + low 3 bits of the escape byte). */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms: FCMOVNcc, the 0xe0..0xe7 control ops, FUCOMI and FCOMI. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* /4 selects among the legacy control instructions by exact ModR/M value. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms: 32-bit integer load/store and 80-bit real load/store. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16179
16180
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * @param   bRm         The ModR/M byte; the r/m field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Operand 1 is STn (from the r/m field), operand 2 is ST0; both registers
       must be occupied, otherwise the stack-underflow path is taken. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16212
16213
/** Opcode 0xdc 11/0.  FADD ST(i),ST(0) - adds ST0 to STi, result in STi. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
16220
16221
/** Opcode 0xdc 11/1.  FMUL ST(i),ST(0) - multiplies STi by ST0, result in STi. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
16228
16229
/** Opcode 0xdc 11/4.  FSUBR ST(i),ST(0) - reverse subtract, result in STi. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
16236
16237
/** Opcode 0xdc 11/5.  FSUB ST(i),ST(0) - subtracts ST0 from STi, result in STi. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
16244
16245
/** Opcode 0xdc 11/6.  FDIVR ST(i),ST(0) - reverse divide, result in STi. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
16252
16253
/** Opcode 0xdc 11/7.  FDIV ST(i),ST(0) - divides STi by ST0, result in STi. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
16260
16261
16262/**
16263 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
16264 * memory operand, and storing the result in ST0.
16265 *
16266 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16267 */
16268FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
16269{
16270 IEM_MC_BEGIN(3, 3);
16271 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16272 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16273 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
16274 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16275 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
16276 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
16277
16278 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16280 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16281 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16282
16283 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16284 IEM_MC_PREPARE_FPU_USAGE();
16285 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
16286 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
16287 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16288 IEM_MC_ELSE()
16289 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16290 IEM_MC_ENDIF();
16291 IEM_MC_ADVANCE_RIP();
16292
16293 IEM_MC_END();
16294 return VINF_SUCCESS;
16295}
16296
16297
/** Opcode 0xdc !11/0.  FADD m64r - adds a 64-bit float from memory to ST0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
16304
16305
/** Opcode 0xdc !11/1.  FMUL m64r - multiplies ST0 by a 64-bit float from memory. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
16312
16313
/** Opcode 0xdc !11/2.
 * FCOM st0,m64r - compares ST0 with a 64-bit float from memory; only the FPU
 * status word is updated, no register is written. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* UINT8_MAX: no destination register for the underflow handling, this
           instruction has no register result (compare only updates FSW). */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16346
16347
/** Opcode 0xdc !11/3.
 * FCOMP st0,m64r - same as FCOM m64r but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* UINT8_MAX: no destination register; the stack is still popped. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16380
16381
/** Opcode 0xdc !11/4.  FSUB m64r - subtracts a 64-bit float from ST0. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
16388
16389
/** Opcode 0xdc !11/5.  FSUBR m64r - reverse subtract, result in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
16396
16397
/** Opcode 0xdc !11/6.  FDIV m64r - divides ST0 by a 64-bit float from memory. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
16404
16405
/** Opcode 0xdc !11/7.  FDIVR m64r - reverse divide, result in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
16412
16413
16414/** Opcode 0xdc. */
16415FNIEMOP_DEF(iemOp_EscF4)
16416{
16417 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16418 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
16419 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16420 {
16421 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16422 {
16423 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
16424 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
16425 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
16426 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
16427 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
16428 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
16429 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
16430 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
16431 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16432 }
16433 }
16434 else
16435 {
16436 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16437 {
16438 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
16439 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
16440 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
16441 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
16442 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
16443 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
16444 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
16445 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
16446 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16447 }
16448 }
16449}
16450
16451
/** Opcode 0xdd !11/0.
 * FLD m64r - pushes a 64-bit float from memory onto the FPU register stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) is the slot the push will occupy; if it is occupied the push
       overflows the stack (else branch). */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16483
16484
/** Opcode 0xdd !11/1.  (Header previously said !11/0; iemOp_EscF5 dispatches
 * this from reg field 1.)
 * FISTTP m64i - stores ST0 to memory as a truncated 64-bit integer and pops. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: if the invalid-operation exception is masked (FCW.IM),
           store the integer-indefinite value before raising underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16519
16520
/** Opcode 0xdd !11/2.  (Header previously said !11/0; iemOp_EscF5 dispatches
 * this from reg field 2.)
 * FST m64r - stores ST0 to memory as a 64-bit float without popping. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: if the invalid-operation exception is masked (FCW.IM),
           store the negative QNaN indefinite before raising underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16555
16556
16557
16558
/** Opcode 0xdd !11/3.  (Header previously said !11/0; iemOp_EscF5 dispatches
 * this from reg field 3.)
 * FSTP m64r - stores ST0 to memory as a 64-bit float and pops. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: if the invalid-operation exception is masked (FCW.IM),
           store the negative QNaN indefinite; the stack is still popped. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16593
16594
/** Opcode 0xdd !11/4.  (Header previously said !11/0; iemOp_EscF5 dispatches
 * this from reg field 4.)
 * FRSTOR - restores the full FPU state from a 94/108 byte memory image;
 * deferred to a C implementation. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
16612
16613
/** Opcode 0xdd !11/6.  (Header previously said !11/0; iemOp_EscF5 dispatches
 * this from reg field 6.)
 * FNSAVE - saves the full FPU state to a 94/108 byte memory image; deferred
 * to a C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
16632
/** Opcode 0xdd !11/7.  (Header previously said !11/0; iemOp_EscF5 dispatches
 * this from reg field 7.)
 * FNSTSW m16 - stores the FPU status word to a 16-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
16657
16658
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - marks the given register as empty in the FPU tag word. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
       unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16680
16681
/** Opcode 0xdd 11/2.  (Header previously said 11/1; iemOp_EscF5 dispatches
 * this from reg field 2, matching FST ST(i).)
 * FST ST(i) - copies ST0 into ST(i). */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Wrap the ST0 value as a result with a zero FSW so the common
           result-store path can be reused. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16706
16707
/** Opcode 0xdd 11/4.  (Header previously said 11/3; iemOp_EscF5 dispatches
 * this from reg field 4, matching FUCOM ST(i).)
 * FUCOM ST(i) - unordered compare of ST0 with ST(i), FSW only. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
16714
16715
/** Opcode 0xdd 11/5.  (Header previously said 11/4; iemOp_EscF5 dispatches
 * this from reg field 5, matching FUCOMP ST(i).)
 * FUCOMP ST(i) - unordered compare of ST0 with ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
16722
16723
16724/** Opcode 0xdd. */
16725FNIEMOP_DEF(iemOp_EscF5)
16726{
16727 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16728 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
16729 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16730 {
16731 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16732 {
16733 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
16734 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
16735 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
16736 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
16737 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
16738 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
16739 case 6: return IEMOP_RAISE_INVALID_OPCODE();
16740 case 7: return IEMOP_RAISE_INVALID_OPCODE();
16741 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16742 }
16743 }
16744 else
16745 {
16746 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16747 {
16748 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
16749 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
16750 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
16751 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
16752 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
16753 case 5: return IEMOP_RAISE_INVALID_OPCODE();
16754 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
16755 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
16756 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16757 }
16758 }
16759}
16760
16761
/** Opcode 0xde 11/0.  FADDP ST(i),ST(0) - add and pop, result in STi. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
16768
16769
/** Opcode 0xde 11/1.  (Header previously said 11/0; iemOp_EscF6 dispatches
 * this from reg field 1.)
 * FMULP ST(i),ST(0) - multiply and pop, result in STi. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
16776
16777
/** Opcode 0xde 0xd9.  FCOMPP - compares ST0 with ST1 and pops both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
}
16784
16785
/** Opcode 0xde 11/4.  FSUBRP ST(i),ST(0) - reverse subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
16792
16793
/** Opcode 0xde 11/5.  FSUBP ST(i),ST(0) - subtract and pop, result in STi. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
16800
16801
/** Opcode 0xde 11/6.  FDIVRP ST(i),ST(0) - reverse divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
16808
16809
/** Opcode 0xde 11/7.  FDIVP ST(i),ST(0) - divide and pop, result in STi. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
16816
16817
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte (memory form); provides the effective
 *                      address of the 16-bit integer operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST0 (register 0) is both the first operand and the destination. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16853
16854
/** Opcode 0xde !11/0.  FIADD m16i - adds a 16-bit integer from memory to ST0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
16861
16862
/** Opcode 0xde !11/1.  FIMUL m16i - multiplies ST0 by a 16-bit integer from memory. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
16869
16870
/** Opcode 0xde !11/2.
 * FICOM st0,m16i - compares ST0 with a 16-bit integer from memory; only the
 * FPU status word is updated. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* UINT8_MAX: compare has no register result to replace on underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16903
16904
/** Opcode 0xde !11/3.
 * FICOMP st0,m16i - same as FICOM m16i but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* UINT8_MAX: no register result; the stack is still popped. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16937
16938
/** Opcode 0xde !11/4.  FISUB m16i - subtracts a 16-bit integer from ST0. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
16945
16946
/** Opcode 0xde !11/5.  FISUBR m16i - reverse subtract, result in ST0. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
16953
16954
/** Opcode 0xde !11/6.  FIDIV m16i - divides ST0 by a 16-bit integer from memory. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
16961
16962
/** Opcode 0xde !11/7.  FIDIVR m16i - reverse divide, result in ST0. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
16969
16970
16971/** Opcode 0xde. */
16972FNIEMOP_DEF(iemOp_EscF6)
16973{
16974 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16975 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
16976 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16977 {
16978 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16979 {
16980 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
16981 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
16982 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
16983 case 3: if (bRm == 0xd9)
16984 return FNIEMOP_CALL(iemOp_fcompp);
16985 return IEMOP_RAISE_INVALID_OPCODE();
16986 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
16987 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
16988 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
16989 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
16990 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16991 }
16992 }
16993 else
16994 {
16995 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16996 {
16997 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
16998 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
16999 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
17000 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
17001 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
17002 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
17003 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
17004 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
17005 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17006 }
17007 }
17008}
17009
17010
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp: frees the
 * register and pops the stack. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    /* The pop half: increment the top-of-stack pointer. */
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17032
17033
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - copies the FPU status word into the AX register. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17050
17051
/** Opcode 0xdf 11/5.
 * FUCOMIP ST0,ST(i) - compare into EFLAGS and pop. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    /* NOTE(review): shares the same worker as FCOMIP; the ordered/unordered
       difference in invalid-operation reporting on QNaN operands does not
       appear to be distinguished here - confirm against the assembly helper. */
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
17058
17059
/** Opcode 0xdf 11/6.
 * FCOMIP ST0,ST(i) - compare into EFLAGS and pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
17066
17067
/** Opcode 0xdf !11/0.
 * FILD m16i - converts a 16-bit integer from memory and pushes it onto the
 * FPU register stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) is the slot the push will occupy; if it is occupied the push
       overflows the stack (else branch). */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17099
17100
/** Opcode 0xdf !11/1.
 * FISTTP m16i - store ST0 to memory as int16 with truncation, then pop.
 * On an empty register, the IM-masked path writes the integer indefinite
 * value (INT16_MIN) before raising the stack underflow. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(int16_t *,                 pi16Dst,                1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,              2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination up front so store faults are raised before the FPU state changes. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()   /* invalid-operation exception masked? */
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17135
17136
/** Opcode 0xdf !11/2.
 * FIST m16i - store ST0 to memory as int16 (rounded per FCW.RC), no pop.
 * Same shape as fisttp_m16i but uses the rounding worker and the
 * non-popping FSW/underflow updates. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(int16_t *,                 pi16Dst,                1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,              2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);  /* no pop */
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()   /* invalid-operation exception masked? */
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);  /* no pop */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17171
17172
/** Opcode 0xdf !11/3.
 * FISTP m16i - store ST0 to memory as int16 (rounded per FCW.RC), then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(int16_t *,                 pi16Dst,                1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,              2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination up front so store faults precede FPU state changes. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()   /* invalid-operation exception masked? */
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17207
17208
/** Opcode 0xdf !11/4.
 * FBLD m80bcd - not implemented yet; declared via the stub macro. */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
17211
17212
/** Opcode 0xdf !11/5.
 * FILD m64i - convert a signed 64-bit integer from memory to R80 and push it
 * onto the FPU stack.  Mirrors iemOp_fild_m16i with 64-bit fetch/worker. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,            FpuRes);
    IEM_MC_LOCAL(int64_t,                 i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,   pFpuRes,    FpuRes,     0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val,    i64Val,     1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)       /* room for the push? */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17244
17245
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd - not implemented yet; declared via the stub macro. */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
17248
17249
/** Opcode 0xdf !11/7.
 * FISTP m64i - store ST0 to memory as int64 (rounded per FCW.RC), then pop.
 * 64-bit sibling of iemOp_fistp_m16i; integer indefinite is INT64_MIN. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(int64_t *,                 pi64Dst,                1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,              2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()   /* invalid-operation exception masked? */
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17284
17285
/** Opcode 0xdf.
 * FPU escape group 7: dispatches on ModRM - register forms (mod=3) on the
 * /reg field, memory forms on /reg as well. */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register operand forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0)    /* only df e0 (fnstsw ax) is valid in /4 */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory operand forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17323
17324
/** Opcode 0xe0.
 * LOOPNE/LOOPNZ Jb - decrement rCX (width per effective address size) and
 * take the relative jump while rCX != 0 and ZF is clear. */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The counter register width follows the *address* size, not operand size. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17371
17372
/** Opcode 0xe1.
 * LOOPE/LOOPZ Jb - decrement rCX (width per effective address size) and
 * take the relative jump while rCX != 0 and ZF is set. */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The counter register width follows the *address* size, not operand size. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17419
17420
/** Opcode 0xe2.
 * LOOP Jb - decrement rCX and jump while it is non-zero.  A LOOP that
 * targets itself (displacement == -instruction length) would just spin the
 * counter down, so it is short-circuited by clearing rCX and advancing. */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Self-referencing loop: consume the whole count at once. */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Self-referencing loop: consume the whole count at once. */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Self-referencing loop: consume the whole count at once. */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17494
17495
/** Opcode 0xe3.
 * JCXZ/JECXZ/JRCXZ Jb - jump if the counter register is zero.  Unlike the
 * LOOP family, rCX is only tested, never decremented. */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The counter register width follows the effective address size. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17539
17540
/** Opcode 0xe4
 * IN AL,Ib - read one byte from the immediate I/O port into AL. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
}
17549
17550
/** Opcode 0xe5
 * IN eAX,Ib - read a word or dword (per effective operand size) from the
 * immediate I/O port into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
17559
17560
/** Opcode 0xe6
 * OUT Ib,AL - write AL to the immediate I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
17569
17570
/** Opcode 0xe7
 * OUT Ib,eAX - write AX/EAX (per effective operand size) to the immediate
 * I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
17579
17580
/** Opcode 0xe8.
 * CALL Jv - near relative call.  In 64-bit mode the immediate is 32 bits
 * sign-extended to 64 (no 16-bit form there thanks to the default-64 op
 * size helper). */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* imm32 sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17609
17610
/** Opcode 0xe9.
 * JMP Jv - near relative jump.  64-bit mode shares the 32-bit immediate
 * path (imm32, sign-extended by IEM_MC_REL_JMP_S32). */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:   /* 64-bit uses an imm32 too, hence the shared case. */
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17640
17641
/** Opcode 0xea.
 * JMP Ap - direct far jump with a ptr16:16 / ptr16:32 immediate.
 * Invalid in 64-bit mode (IEMOP_HLP_NO_64BIT). */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);   /* selector follows the offset */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
17658
17659
/** Opcode 0xeb.
 * JMP Jb - short relative jump (8-bit signed displacement). */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
17673
17674
/** Opcode 0xec
 * IN AL,DX - read one byte from the I/O port addressed by DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
17682
17683
/** Opcode 0xed
 * IN eAX,DX - read a word or dword (per effective operand size) from the
 * I/O port addressed by DX.
 * NOTE(review): function is named iemOp_eAX_DX, missing the 'in_' that its
 * siblings carry; renaming would require touching the opcode table, so it
 * is only flagged here. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
17691
17692
/** Opcode 0xee
 * OUT DX,AL - write AL to the I/O port addressed by DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
17700
17701
/** Opcode 0xef
 * OUT DX,eAX - write AX/EAX (per effective operand size) to the I/O port
 * addressed by DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
17709
17710
/** Opcode 0xf0.
 * LOCK prefix - records the prefix flag and restarts decoding with the
 * next opcode byte. */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    /* Recurse into the one-byte opcode map for the real instruction. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
17720
17721
/** Opcode 0xf1.
 * INT1/ICEBP - raises a \#DB via the generic int worker, flagged as not
 * being the INT3 breakpoint instruction. */
FNIEMOP_DEF(iemOp_int_1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
17730
17731
/** Opcode 0xf2.
 * REPNE/REPNZ prefix - records the prefix and restarts decoding with the
 * next opcode byte. */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
17747
17748
/** Opcode 0xf3.
 * REPE/REPZ prefix - records the prefix and restarts decoding with the
 * next opcode byte. */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
17764
17765
/** Opcode 0xf4.
 * HLT - defers entirely to the C implementation (privilege check and halt
 * state handling live there). */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
17772
17773
/** Opcode 0xf5.
 * CMC - complement the carry flag; no other flags are touched. */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17785
17786
17787/**
17788 * Common implementation of 'inc/dec/not/neg Eb'.
17789 *
17790 * @param bRm The RM byte.
17791 * @param pImpl The instruction implementation.
17792 */
17793FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
17794{
17795 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17796 {
17797 /* register access */
17798 IEM_MC_BEGIN(2, 0);
17799 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17800 IEM_MC_ARG(uint32_t *, pEFlags, 1);
17801 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17802 IEM_MC_REF_EFLAGS(pEFlags);
17803 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
17804 IEM_MC_ADVANCE_RIP();
17805 IEM_MC_END();
17806 }
17807 else
17808 {
17809 /* memory access. */
17810 IEM_MC_BEGIN(2, 2);
17811 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17812 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17813 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17814
17815 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17816 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17817 IEM_MC_FETCH_EFLAGS(EFlags);
17818 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17819 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
17820 else
17821 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
17822
17823 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
17824 IEM_MC_COMMIT_EFLAGS(EFlags);
17825 IEM_MC_ADVANCE_RIP();
17826 IEM_MC_END();
17827 }
17828 return VINF_SUCCESS;
17829}
17830
17831
17832/**
17833 * Common implementation of 'inc/dec/not/neg Ev'.
17834 *
17835 * @param bRm The RM byte.
17836 * @param pImpl The instruction implementation.
17837 */
17838FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
17839{
17840 /* Registers are handled by a common worker. */
17841 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17842 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17843
17844 /* Memory we do here. */
17845 switch (pVCpu->iem.s.enmEffOpSize)
17846 {
17847 case IEMMODE_16BIT:
17848 IEM_MC_BEGIN(2, 2);
17849 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
17850 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17851 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17852
17853 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17854 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17855 IEM_MC_FETCH_EFLAGS(EFlags);
17856 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17857 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
17858 else
17859 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
17860
17861 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
17862 IEM_MC_COMMIT_EFLAGS(EFlags);
17863 IEM_MC_ADVANCE_RIP();
17864 IEM_MC_END();
17865 return VINF_SUCCESS;
17866
17867 case IEMMODE_32BIT:
17868 IEM_MC_BEGIN(2, 2);
17869 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
17870 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17871 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17872
17873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17874 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17875 IEM_MC_FETCH_EFLAGS(EFlags);
17876 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17877 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
17878 else
17879 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
17880
17881 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
17882 IEM_MC_COMMIT_EFLAGS(EFlags);
17883 IEM_MC_ADVANCE_RIP();
17884 IEM_MC_END();
17885 return VINF_SUCCESS;
17886
17887 case IEMMODE_64BIT:
17888 IEM_MC_BEGIN(2, 2);
17889 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
17890 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17891 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17892
17893 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17894 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17895 IEM_MC_FETCH_EFLAGS(EFlags);
17896 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17897 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
17898 else
17899 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
17900
17901 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
17902 IEM_MC_COMMIT_EFLAGS(EFlags);
17903 IEM_MC_ADVANCE_RIP();
17904 IEM_MC_END();
17905 return VINF_SUCCESS;
17906
17907 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17908 }
17909}
17910
17911
/** Opcode 0xf6 /0.
 * TEST Eb,Ib - AND without writeback, flags only.  The memory operand is
 * therefore mapped read-only (IEM_ACCESS_DATA_R). */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,          0);
        IEM_MC_ARG(uint8_t,         u8Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        /* The '1' tells the effective address calc that 1 more opcode byte (imm8) follows. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
17958
17959
/** Opcode 0xf7 /0.
 * TEST Ev,Iv - AND without writeback, flags only, per operand size.  The
 * 64-bit form takes an imm32 sign-extended to 64 bits; memory operands are
 * mapped read-only (IEM_ACCESS_DATA_R). */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                /* imm32 sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,         0);
                IEM_MC_ARG(uint16_t,        u16Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* The '2' tells the effective address calc that 2 more opcode bytes (imm16) follow. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,         0);
                IEM_MC_ARG(uint32_t,        u32Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* 4 more opcode bytes (imm32) follow the ModRM/SIB/disp. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,         0);
                IEM_MC_ARG(uint64_t,        u64Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* Still only 4 immediate bytes in 64-bit mode (imm32, sign-extended). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
18099
18100
/**
 * Common worker for the group 3 byte-sized MUL, IMUL, DIV and IDIV
 * instructions - opcode 0xf6 /4, /5, /6 and /7.
 *
 * The implicit destination is AX (referenced via X86_GREG_xAX below): AL
 * supplies the multiplicand/dividend and the 16-bit result (or
 * quotient/remainder pair) is written back through pu16AX.
 *
 * @param   bRm     The ModR/M byte.
 * @param   pfnU8   The 8-bit multiply/divide assembly worker.  A zero return
 *                  code means success; non-zero raises \#DE (divide error).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Non-zero worker return code signals a divide error (#DE). */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Non-zero worker return code signals a divide error (#DE). */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
18152
18153
18154/** Opcode 0xf7 /4, /5, /6 and /7. */
18155FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
18156{
18157 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18158
18159 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18160 {
18161 /* register access */
18162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18163 switch (pVCpu->iem.s.enmEffOpSize)
18164 {
18165 case IEMMODE_16BIT:
18166 {
18167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18168 IEM_MC_BEGIN(4, 1);
18169 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18170 IEM_MC_ARG(uint16_t *, pu16DX, 1);
18171 IEM_MC_ARG(uint16_t, u16Value, 2);
18172 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18173 IEM_MC_LOCAL(int32_t, rc);
18174
18175 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18176 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18177 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
18178 IEM_MC_REF_EFLAGS(pEFlags);
18179 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
18180 IEM_MC_IF_LOCAL_IS_Z(rc) {
18181 IEM_MC_ADVANCE_RIP();
18182 } IEM_MC_ELSE() {
18183 IEM_MC_RAISE_DIVIDE_ERROR();
18184 } IEM_MC_ENDIF();
18185
18186 IEM_MC_END();
18187 return VINF_SUCCESS;
18188 }
18189
18190 case IEMMODE_32BIT:
18191 {
18192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18193 IEM_MC_BEGIN(4, 1);
18194 IEM_MC_ARG(uint32_t *, pu32AX, 0);
18195 IEM_MC_ARG(uint32_t *, pu32DX, 1);
18196 IEM_MC_ARG(uint32_t, u32Value, 2);
18197 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18198 IEM_MC_LOCAL(int32_t, rc);
18199
18200 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18201 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
18202 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
18203 IEM_MC_REF_EFLAGS(pEFlags);
18204 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
18205 IEM_MC_IF_LOCAL_IS_Z(rc) {
18206 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
18207 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
18208 IEM_MC_ADVANCE_RIP();
18209 } IEM_MC_ELSE() {
18210 IEM_MC_RAISE_DIVIDE_ERROR();
18211 } IEM_MC_ENDIF();
18212
18213 IEM_MC_END();
18214 return VINF_SUCCESS;
18215 }
18216
18217 case IEMMODE_64BIT:
18218 {
18219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18220 IEM_MC_BEGIN(4, 1);
18221 IEM_MC_ARG(uint64_t *, pu64AX, 0);
18222 IEM_MC_ARG(uint64_t *, pu64DX, 1);
18223 IEM_MC_ARG(uint64_t, u64Value, 2);
18224 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18225 IEM_MC_LOCAL(int32_t, rc);
18226
18227 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18228 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
18229 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
18230 IEM_MC_REF_EFLAGS(pEFlags);
18231 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
18232 IEM_MC_IF_LOCAL_IS_Z(rc) {
18233 IEM_MC_ADVANCE_RIP();
18234 } IEM_MC_ELSE() {
18235 IEM_MC_RAISE_DIVIDE_ERROR();
18236 } IEM_MC_ENDIF();
18237
18238 IEM_MC_END();
18239 return VINF_SUCCESS;
18240 }
18241
18242 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18243 }
18244 }
18245 else
18246 {
18247 /* memory access. */
18248 switch (pVCpu->iem.s.enmEffOpSize)
18249 {
18250 case IEMMODE_16BIT:
18251 {
18252 IEM_MC_BEGIN(4, 2);
18253 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18254 IEM_MC_ARG(uint16_t *, pu16DX, 1);
18255 IEM_MC_ARG(uint16_t, u16Value, 2);
18256 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18257 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18258 IEM_MC_LOCAL(int32_t, rc);
18259
18260 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18262 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18263 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18264 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
18265 IEM_MC_REF_EFLAGS(pEFlags);
18266 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
18267 IEM_MC_IF_LOCAL_IS_Z(rc) {
18268 IEM_MC_ADVANCE_RIP();
18269 } IEM_MC_ELSE() {
18270 IEM_MC_RAISE_DIVIDE_ERROR();
18271 } IEM_MC_ENDIF();
18272
18273 IEM_MC_END();
18274 return VINF_SUCCESS;
18275 }
18276
18277 case IEMMODE_32BIT:
18278 {
18279 IEM_MC_BEGIN(4, 2);
18280 IEM_MC_ARG(uint32_t *, pu32AX, 0);
18281 IEM_MC_ARG(uint32_t *, pu32DX, 1);
18282 IEM_MC_ARG(uint32_t, u32Value, 2);
18283 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18284 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18285 IEM_MC_LOCAL(int32_t, rc);
18286
18287 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18289 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18290 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
18291 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
18292 IEM_MC_REF_EFLAGS(pEFlags);
18293 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
18294 IEM_MC_IF_LOCAL_IS_Z(rc) {
18295 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
18296 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
18297 IEM_MC_ADVANCE_RIP();
18298 } IEM_MC_ELSE() {
18299 IEM_MC_RAISE_DIVIDE_ERROR();
18300 } IEM_MC_ENDIF();
18301
18302 IEM_MC_END();
18303 return VINF_SUCCESS;
18304 }
18305
18306 case IEMMODE_64BIT:
18307 {
18308 IEM_MC_BEGIN(4, 2);
18309 IEM_MC_ARG(uint64_t *, pu64AX, 0);
18310 IEM_MC_ARG(uint64_t *, pu64DX, 1);
18311 IEM_MC_ARG(uint64_t, u64Value, 2);
18312 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18313 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18314 IEM_MC_LOCAL(int32_t, rc);
18315
18316 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18318 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18319 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
18320 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
18321 IEM_MC_REF_EFLAGS(pEFlags);
18322 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
18323 IEM_MC_IF_LOCAL_IS_Z(rc) {
18324 IEM_MC_ADVANCE_RIP();
18325 } IEM_MC_ELSE() {
18326 IEM_MC_RAISE_DIVIDE_ERROR();
18327 } IEM_MC_ENDIF();
18328
18329 IEM_MC_END();
18330 return VINF_SUCCESS;
18331 }
18332
18333 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18334 }
18335 }
18336}
18337
/** Opcode 0xf6 - group 3 with byte operands; selected by ModR/M.reg. */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Eb, "not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Eb, "neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            /* div/idiv additionally leave OF and CF undefined. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
18374
18375
/** Opcode 0xf7 - group 3 with word/dword/qword operands; selected by ModR/M.reg. */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Ev, "not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Ev, "neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            /* div/idiv additionally leave OF and CF undefined. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
18412
18413
/** Opcode 0xf8 - clc (clear the carry flag). */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
18425
18426
/** Opcode 0xf9 - stc (set the carry flag). */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
18438
18439
/** Opcode 0xfa - cli; deferred to a C implementation (privilege checking
 *  and interrupt-flag handling are done in iemCImpl_cli). */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
18447
18448
/** Opcode 0xfb - sti; deferred to a C implementation (privilege checking
 *  and interrupt shadow handling are done in iemCImpl_sti). */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
18455
18456
/** Opcode 0xfc - cld (clear the direction flag). */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
18468
18469
/** Opcode 0xfd - std (set the direction flag). */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
18481
18482
/** Opcode 0xfe - group 4 (inc/dec Eb); selected by ModR/M.reg. */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Eb, "inc Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Eb, "dec Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
        default:
            /* /2 thru /7 are undefined for group 4. */
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
18500
18501
/**
 * Opcode 0xff /2 - near indirect call; the target RIP comes from a register
 * or from memory, sized by the effective operand size.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
18586
18587typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
18588
18589FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
18590{
18591 /* Registers? How?? */
18592 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
18593 { /* likely */ }
18594 else
18595 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
18596
18597 /* Far pointer loaded from memory. */
18598 switch (pVCpu->iem.s.enmEffOpSize)
18599 {
18600 case IEMMODE_16BIT:
18601 IEM_MC_BEGIN(3, 1);
18602 IEM_MC_ARG(uint16_t, u16Sel, 0);
18603 IEM_MC_ARG(uint16_t, offSeg, 1);
18604 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
18605 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18606 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18608 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18609 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
18610 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18611 IEM_MC_END();
18612 return VINF_SUCCESS;
18613
18614 case IEMMODE_64BIT:
18615 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
18616 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
18617 * and call far qword [rsp] encodings. */
18618 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
18619 {
18620 IEM_MC_BEGIN(3, 1);
18621 IEM_MC_ARG(uint16_t, u16Sel, 0);
18622 IEM_MC_ARG(uint64_t, offSeg, 1);
18623 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
18624 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18625 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18627 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18628 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
18629 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18630 IEM_MC_END();
18631 return VINF_SUCCESS;
18632 }
18633 /* AMD falls thru. */
18634
18635 case IEMMODE_32BIT:
18636 IEM_MC_BEGIN(3, 1);
18637 IEM_MC_ARG(uint16_t, u16Sel, 0);
18638 IEM_MC_ARG(uint32_t, offSeg, 1);
18639 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
18640 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18643 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18644 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
18645 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18646 IEM_MC_END();
18647 return VINF_SUCCESS;
18648
18649 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18650 }
18651}
18652
18653
/**
 * Opcode 0xff /3 - far indirect call through a far pointer in memory.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    /* Shares the decode logic with jmpf; only the C worker differs. */
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
18663
18664
/**
 * Opcode 0xff /4 - near indirect jump; the target RIP comes from a register
 * or from memory, sized by the effective operand size.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
18749
18750
/**
 * Opcode 0xff /5 - far indirect jump through a far pointer in memory.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    /* Shares the decode logic with callf; only the C worker differs. */
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
18760
18761
/**
 * Opcode 0xff /6 - push Ev (push a register or memory operand of the
 * effective operand size).
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
18817
18818
/** Opcode 0xff - group 5; dispatched on ModR/M.reg. */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Ev, "inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Ev, "dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            /* /7 is undefined for group 5. */
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    AssertFailedReturn(VERR_IEM_IPE_3);
}
18847
18848
18849
/**
 * Decoder dispatch table for the one-byte opcode map (0x00..0xff).
 * Indexed directly by the opcode byte; prefix bytes and group/escape opcodes
 * map to handlers that continue decoding (e.g. iemOp_2byteEscape, iemOp_Grp5).
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
18917
18918
18919/** @} */
18920
18921#ifdef _MSC_VER
18922# pragma warning(pop)
18923#endif
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette