VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 65605

Last change on this file was r65605, checked in by vboxsync on 2017-02-03:

IEM: Initial prefix indexing expansion of g_apfnTwoByteMap.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 663.2 KB
1/* $Id: IEMAllInstructions.cpp.h 65605 2017-02-03 17:42:00Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24#ifdef _MSC_VER
25# pragma warning(push)
26# pragma warning(disable: 4702) /* Unreachable code like return in iemOp_Grp6_lldt. */
27#endif
28
29
30/**
31 * Common worker for instructions like ADD, AND, OR, ++ with a byte
32 * memory/register as the destination.
33 *
34 * @param pImpl Pointer to the instruction implementation (assembly).
35 */
36FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
37{
38 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
39
40 /*
41 * If rm is denoting a register, no more instruction bytes.
42 */
43 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
44 {
45 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
46
47 IEM_MC_BEGIN(3, 0);
48 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
49 IEM_MC_ARG(uint8_t, u8Src, 1);
50 IEM_MC_ARG(uint32_t *, pEFlags, 2);
51
52 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
53 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
54 IEM_MC_REF_EFLAGS(pEFlags);
55 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
56
57 IEM_MC_ADVANCE_RIP();
58 IEM_MC_END();
59 }
60 else
61 {
62 /*
63 * We're accessing memory.
64 * Note! We're putting the eflags on the stack here so we can commit them
65 * after the memory.
66 */
67 uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
68 IEM_MC_BEGIN(3, 2);
69 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
70 IEM_MC_ARG(uint8_t, u8Src, 1);
71 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
72 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
73
74 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
75 if (!pImpl->pfnLockedU8)
76 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
77 IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
78 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
79 IEM_MC_FETCH_EFLAGS(EFlags);
80 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
81 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
82 else
83 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
84
85 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
86 IEM_MC_COMMIT_EFLAGS(EFlags);
87 IEM_MC_ADVANCE_RIP();
88 IEM_MC_END();
89 }
90 return VINF_SUCCESS;
91}
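/*
 * Illustrative sketch (names assumed, not taken from this section): a
 * one-byte opcode handler such as ADD Eb,Gb (opcode 0x00) would reach the
 * worker above along these lines, with g_iemAImpl_add following the usual
 * pImpl convention:
 *
 *     FNIEMOP_DEF(iemOp_add_Eb_Gb)
 *     {
 *         IEMOP_MNEMONIC(add_Eb_Gb, "add Eb,Gb");
 *         return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
 *     }
 */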
92
93
94/**
95 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
96 * memory/register as the destination.
97 *
98 * @param pImpl Pointer to the instruction implementation (assembly).
99 */
100FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
101{
102 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
103
104 /*
105 * If rm is denoting a register, no more instruction bytes.
106 */
107 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
108 {
109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
110
111 switch (pVCpu->iem.s.enmEffOpSize)
112 {
113 case IEMMODE_16BIT:
114 IEM_MC_BEGIN(3, 0);
115 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
116 IEM_MC_ARG(uint16_t, u16Src, 1);
117 IEM_MC_ARG(uint32_t *, pEFlags, 2);
118
119 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
120 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
121 IEM_MC_REF_EFLAGS(pEFlags);
122 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
123
124 IEM_MC_ADVANCE_RIP();
125 IEM_MC_END();
126 break;
127
128 case IEMMODE_32BIT:
129 IEM_MC_BEGIN(3, 0);
130 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
131 IEM_MC_ARG(uint32_t, u32Src, 1);
132 IEM_MC_ARG(uint32_t *, pEFlags, 2);
133
134 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
135 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
136 IEM_MC_REF_EFLAGS(pEFlags);
137 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
138
139 if (pImpl != &g_iemAImpl_test)
140 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
141 IEM_MC_ADVANCE_RIP();
142 IEM_MC_END();
143 break;
144
145 case IEMMODE_64BIT:
146 IEM_MC_BEGIN(3, 0);
147 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
148 IEM_MC_ARG(uint64_t, u64Src, 1);
149 IEM_MC_ARG(uint32_t *, pEFlags, 2);
150
151 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
152 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
153 IEM_MC_REF_EFLAGS(pEFlags);
154 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
155
156 IEM_MC_ADVANCE_RIP();
157 IEM_MC_END();
158 break;
159 }
160 }
161 else
162 {
163 /*
164 * We're accessing memory.
165 * Note! We're putting the eflags on the stack here so we can commit them
166 * after the memory.
167 */
168 uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
169 switch (pVCpu->iem.s.enmEffOpSize)
170 {
171 case IEMMODE_16BIT:
172 IEM_MC_BEGIN(3, 2);
173 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
174 IEM_MC_ARG(uint16_t, u16Src, 1);
175 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
176 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
177
178 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
179 if (!pImpl->pfnLockedU16)
180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
181 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
182 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
183 IEM_MC_FETCH_EFLAGS(EFlags);
184 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
185 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
186 else
187 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
188
189 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
190 IEM_MC_COMMIT_EFLAGS(EFlags);
191 IEM_MC_ADVANCE_RIP();
192 IEM_MC_END();
193 break;
194
195 case IEMMODE_32BIT:
196 IEM_MC_BEGIN(3, 2);
197 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
198 IEM_MC_ARG(uint32_t, u32Src, 1);
199 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
200 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
201
202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
203 if (!pImpl->pfnLockedU32)
204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
205 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
206 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
207 IEM_MC_FETCH_EFLAGS(EFlags);
208 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
209 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
210 else
211 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
212
213 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
214 IEM_MC_COMMIT_EFLAGS(EFlags);
215 IEM_MC_ADVANCE_RIP();
216 IEM_MC_END();
217 break;
218
219 case IEMMODE_64BIT:
220 IEM_MC_BEGIN(3, 2);
221 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
222 IEM_MC_ARG(uint64_t, u64Src, 1);
223 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
225
226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
227 if (!pImpl->pfnLockedU64)
228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
229 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
230 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
231 IEM_MC_FETCH_EFLAGS(EFlags);
232 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
233 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
234 else
235 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
236
237 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
238 IEM_MC_COMMIT_EFLAGS(EFlags);
239 IEM_MC_ADVANCE_RIP();
240 IEM_MC_END();
241 break;
242 }
243 }
244 return VINF_SUCCESS;
245}
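/*
 * Note on the 32-bit path above: in 64-bit mode a write to a 32-bit
 * general register zero-extends into bits 63:32 of the full register.
 * TEST only updates EFLAGS and never writes its destination, which is why
 * IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF is skipped for g_iemAImpl_test.
 */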
246
247
248/**
249 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
250 * the destination.
251 *
252 * @param pImpl Pointer to the instruction implementation (assembly).
253 */
254FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
255{
256 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
257
258 /*
259 * If rm is denoting a register, no more instruction bytes.
260 */
261 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
262 {
263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
264 IEM_MC_BEGIN(3, 0);
265 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
266 IEM_MC_ARG(uint8_t, u8Src, 1);
267 IEM_MC_ARG(uint32_t *, pEFlags, 2);
268
269 IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
270 IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
271 IEM_MC_REF_EFLAGS(pEFlags);
272 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
273
274 IEM_MC_ADVANCE_RIP();
275 IEM_MC_END();
276 }
277 else
278 {
279 /*
280 * We're accessing memory.
281 */
282 IEM_MC_BEGIN(3, 1);
283 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
284 IEM_MC_ARG(uint8_t, u8Src, 1);
285 IEM_MC_ARG(uint32_t *, pEFlags, 2);
286 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
287
288 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
290 IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
291 IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
292 IEM_MC_REF_EFLAGS(pEFlags);
293 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
294
295 IEM_MC_ADVANCE_RIP();
296 IEM_MC_END();
297 }
298 return VINF_SUCCESS;
299}
300
301
302/**
303 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
304 * register as the destination.
305 *
306 * @param pImpl Pointer to the instruction implementation (assembly).
307 */
308FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
309{
310 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
311
312 /*
313 * If rm is denoting a register, no more instruction bytes.
314 */
315 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
316 {
317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
318 switch (pVCpu->iem.s.enmEffOpSize)
319 {
320 case IEMMODE_16BIT:
321 IEM_MC_BEGIN(3, 0);
322 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
323 IEM_MC_ARG(uint16_t, u16Src, 1);
324 IEM_MC_ARG(uint32_t *, pEFlags, 2);
325
326 IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
327 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
328 IEM_MC_REF_EFLAGS(pEFlags);
329 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
330
331 IEM_MC_ADVANCE_RIP();
332 IEM_MC_END();
333 break;
334
335 case IEMMODE_32BIT:
336 IEM_MC_BEGIN(3, 0);
337 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
338 IEM_MC_ARG(uint32_t, u32Src, 1);
339 IEM_MC_ARG(uint32_t *, pEFlags, 2);
340
341 IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
342 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
343 IEM_MC_REF_EFLAGS(pEFlags);
344 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
345
346 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
347 IEM_MC_ADVANCE_RIP();
348 IEM_MC_END();
349 break;
350
351 case IEMMODE_64BIT:
352 IEM_MC_BEGIN(3, 0);
353 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
354 IEM_MC_ARG(uint64_t, u64Src, 1);
355 IEM_MC_ARG(uint32_t *, pEFlags, 2);
356
357 IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
358 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
359 IEM_MC_REF_EFLAGS(pEFlags);
360 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
361
362 IEM_MC_ADVANCE_RIP();
363 IEM_MC_END();
364 break;
365 }
366 }
367 else
368 {
369 /*
370 * We're accessing memory.
371 */
372 switch (pVCpu->iem.s.enmEffOpSize)
373 {
374 case IEMMODE_16BIT:
375 IEM_MC_BEGIN(3, 1);
376 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
377 IEM_MC_ARG(uint16_t, u16Src, 1);
378 IEM_MC_ARG(uint32_t *, pEFlags, 2);
379 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
380
381 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
383 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
384 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
385 IEM_MC_REF_EFLAGS(pEFlags);
386 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
387
388 IEM_MC_ADVANCE_RIP();
389 IEM_MC_END();
390 break;
391
392 case IEMMODE_32BIT:
393 IEM_MC_BEGIN(3, 1);
394 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
395 IEM_MC_ARG(uint32_t, u32Src, 1);
396 IEM_MC_ARG(uint32_t *, pEFlags, 2);
397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
398
399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
401 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
402 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
403 IEM_MC_REF_EFLAGS(pEFlags);
404 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
405
406 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
407 IEM_MC_ADVANCE_RIP();
408 IEM_MC_END();
409 break;
410
411 case IEMMODE_64BIT:
412 IEM_MC_BEGIN(3, 1);
413 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
414 IEM_MC_ARG(uint64_t, u64Src, 1);
415 IEM_MC_ARG(uint32_t *, pEFlags, 2);
416 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
417
418 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
420 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
421 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
422 IEM_MC_REF_EFLAGS(pEFlags);
423 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
424
425 IEM_MC_ADVANCE_RIP();
426 IEM_MC_END();
427 break;
428 }
429 }
430 return VINF_SUCCESS;
431}
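/*
 * Worked ModR/M example for the register form above: for the byte sequence
 * 48 03 c3 (add rax,rbx), bRm = 0xc3 = 11 000 011b, giving mod = 3
 * (register operand), reg = 0 (RAX once REX.R = 0 is OR'ed in) and
 * rm = 3 (RBX once REX.B = 0 is OR'ed in).
 */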
432
433
434/**
435 * Common worker for instructions like ADD, AND, OR, ++ operating on AL with
436 * a byte immediate.
437 *
438 * @param pImpl Pointer to the instruction implementation (assembly).
439 */
440FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
441{
442 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
444
445 IEM_MC_BEGIN(3, 0);
446 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
447 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1);
448 IEM_MC_ARG(uint32_t *, pEFlags, 2);
449
450 IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
451 IEM_MC_REF_EFLAGS(pEFlags);
452 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
453
454 IEM_MC_ADVANCE_RIP();
455 IEM_MC_END();
456 return VINF_SUCCESS;
457}
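/*
 * Sketch, mirroring the dispatch example further up (handler name and
 * implementation table assumed): the AL,Ib forms, e.g. opcode 0x04
 * (add al,imm8), would forward here as
 *     return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
 */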
458
459
460/**
461 * Common worker for instructions like ADD, AND, OR, ++ operating on
462 * AX/EAX/RAX with a word/dword immediate.
463 *
464 * @param pImpl Pointer to the instruction implementation (assembly).
465 */
466FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
467{
468 switch (pVCpu->iem.s.enmEffOpSize)
469 {
470 case IEMMODE_16BIT:
471 {
472 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
474
475 IEM_MC_BEGIN(3, 0);
476 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
477 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
478 IEM_MC_ARG(uint32_t *, pEFlags, 2);
479
480 IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
481 IEM_MC_REF_EFLAGS(pEFlags);
482 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
483
484 IEM_MC_ADVANCE_RIP();
485 IEM_MC_END();
486 return VINF_SUCCESS;
487 }
488
489 case IEMMODE_32BIT:
490 {
491 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
493
494 IEM_MC_BEGIN(3, 0);
495 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
496 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
497 IEM_MC_ARG(uint32_t *, pEFlags, 2);
498
499 IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
500 IEM_MC_REF_EFLAGS(pEFlags);
501 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
502
503 if (pImpl != &g_iemAImpl_test)
504 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
505 IEM_MC_ADVANCE_RIP();
506 IEM_MC_END();
507 return VINF_SUCCESS;
508 }
509
510 case IEMMODE_64BIT:
511 {
512 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
514
515 IEM_MC_BEGIN(3, 0);
516 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
517 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
518 IEM_MC_ARG(uint32_t *, pEFlags, 2);
519
520 IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
521 IEM_MC_REF_EFLAGS(pEFlags);
522 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
523
524 IEM_MC_ADVANCE_RIP();
525 IEM_MC_END();
526 return VINF_SUCCESS;
527 }
528
529 IEM_NOT_REACHED_DEFAULT_CASE_RET();
530 }
531}
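/*
 * 'Iz' is a 16-bit or 32-bit immediate, never a 64-bit one: with a 64-bit
 * effective operand size the 32-bit immediate is sign-extended to 64 bits,
 * which is exactly what IEM_OPCODE_GET_NEXT_S32_SX_U64 does above.
 */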
532
533
534/** Opcodes 0xf1, 0xd6. */
535FNIEMOP_DEF(iemOp_Invalid)
536{
537 IEMOP_MNEMONIC(Invalid, "Invalid");
538 return IEMOP_RAISE_INVALID_OPCODE();
539}
540
541
542/** Invalid with RM byte. */
543FNIEMOPRM_DEF(iemOp_InvalidWithRM)
544{
545 RT_NOREF_PV(bRm);
546 IEMOP_MNEMONIC(InvalidWithRm, "InvalidWithRM");
547 return IEMOP_RAISE_INVALID_OPCODE();
548}
549
550
551/** Invalid opcode where Intel requires a Mod R/M sequence. */
552FNIEMOP_DEF(iemOp_InvalidNeedRM)
553{
554 IEMOP_MNEMONIC(InvalidNeedRM, "InvalidNeedRM");
555 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
556 {
557 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
558#ifndef TST_IEM_CHECK_MC
559 RTGCPTR GCPtrEff;
560 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
561 if (rcStrict != VINF_SUCCESS)
562 return rcStrict;
563#endif
564 IEMOP_HLP_DONE_DECODING();
565 }
566 return IEMOP_RAISE_INVALID_OPCODE();
567}
568
569
570/** Invalid opcode where Intel requires a Mod R/M sequence and an 8-bit
571 * immediate. */
572FNIEMOP_DEF(iemOp_InvalidNeedRMImm8)
573{
574 IEMOP_MNEMONIC(InvalidNeedRMImm8, "InvalidNeedRMImm8");
575 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
576 {
577 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
578#ifndef TST_IEM_CHECK_MC
579 RTGCPTR GCPtrEff;
580 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
581 if (rcStrict != VINF_SUCCESS)
582 return rcStrict;
583#endif
584 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); RT_NOREF(bImm);
585 IEMOP_HLP_DONE_DECODING();
586 }
587 return IEMOP_RAISE_INVALID_OPCODE();
588}
589
590
591/** Invalid opcode where Intel requires a 3rd escape byte and a Mod R/M
592 * sequence. */
593FNIEMOP_DEF(iemOp_InvalidNeed3ByteEscRM)
594{
595 IEMOP_MNEMONIC(InvalidNeed3ByteEscRM, "InvalidNeed3ByteEscRM");
596 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
597 {
598 uint8_t b3rd; IEM_OPCODE_GET_NEXT_U8(&b3rd); RT_NOREF(b3rd);
599 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
600#ifndef TST_IEM_CHECK_MC
601 RTGCPTR GCPtrEff;
602 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
603 if (rcStrict != VINF_SUCCESS)
604 return rcStrict;
605#endif
606 IEMOP_HLP_DONE_DECODING();
607 }
608 return IEMOP_RAISE_INVALID_OPCODE();
609}
610
611
612/** Invalid opcode where Intel requires a 3rd escape byte, a Mod R/M sequence, and
613 * an 8-bit immediate. */
614FNIEMOP_DEF(iemOp_InvalidNeed3ByteEscRMImm8)
615{
616 IEMOP_MNEMONIC(InvalidNeed3ByteEscRMImm8, "InvalidNeed3ByteEscRMImm8");
617 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
618 {
619 uint8_t b3rd; IEM_OPCODE_GET_NEXT_U8(&b3rd); RT_NOREF(b3rd);
620 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
621#ifndef TST_IEM_CHECK_MC
622 RTGCPTR GCPtrEff;
623 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
624 if (rcStrict != VINF_SUCCESS)
625 return rcStrict;
626#endif
627 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); RT_NOREF(bImm);
628 IEMOP_HLP_DONE_DECODING();
629 }
630 return IEMOP_RAISE_INVALID_OPCODE();
631}
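/*
 * The InvalidNeed* helpers above model observed Intel behaviour: even for
 * these undefined opcodes the CPU appears to consume the full
 * ModR/M/immediate encoding before raising #UD, so the emulated
 * instruction length and fault ordering match real hardware, whereas
 * non-Intel decoding raises #UD without the extra opcode fetches.
 */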
632
633
634
635/** @name ..... opcodes.
636 *
637 * @{
638 */
639
640/** @} */
641
642
643/** @name Two byte opcodes (first byte 0x0f).
644 *
645 * @{
646 */
647
648/** Opcode 0x0f 0x00 /0. */
649FNIEMOPRM_DEF(iemOp_Grp6_sldt)
650{
651 IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
652 IEMOP_HLP_MIN_286();
653 IEMOP_HLP_NO_REAL_OR_V86_MODE();
654
655 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
656 {
657 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
658 switch (pVCpu->iem.s.enmEffOpSize)
659 {
660 case IEMMODE_16BIT:
661 IEM_MC_BEGIN(0, 1);
662 IEM_MC_LOCAL(uint16_t, u16Ldtr);
663 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
664 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
665 IEM_MC_ADVANCE_RIP();
666 IEM_MC_END();
667 break;
668
669 case IEMMODE_32BIT:
670 IEM_MC_BEGIN(0, 1);
671 IEM_MC_LOCAL(uint32_t, u32Ldtr);
672 IEM_MC_FETCH_LDTR_U32(u32Ldtr);
673 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
674 IEM_MC_ADVANCE_RIP();
675 IEM_MC_END();
676 break;
677
678 case IEMMODE_64BIT:
679 IEM_MC_BEGIN(0, 1);
680 IEM_MC_LOCAL(uint64_t, u64Ldtr);
681 IEM_MC_FETCH_LDTR_U64(u64Ldtr);
682 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
683 IEM_MC_ADVANCE_RIP();
684 IEM_MC_END();
685 break;
686
687 IEM_NOT_REACHED_DEFAULT_CASE_RET();
688 }
689 }
690 else
691 {
692 IEM_MC_BEGIN(0, 2);
693 IEM_MC_LOCAL(uint16_t, u16Ldtr);
694 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
695 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
696 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
697 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
698 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
699 IEM_MC_ADVANCE_RIP();
700 IEM_MC_END();
701 }
702 return VINF_SUCCESS;
703}
704
705
706/** Opcode 0x0f 0x00 /1. */
707FNIEMOPRM_DEF(iemOp_Grp6_str)
708{
709 IEMOP_MNEMONIC(str, "str Rv/Mw");
710 IEMOP_HLP_MIN_286();
711 IEMOP_HLP_NO_REAL_OR_V86_MODE();
712
713 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
714 {
715 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
716 switch (pVCpu->iem.s.enmEffOpSize)
717 {
718 case IEMMODE_16BIT:
719 IEM_MC_BEGIN(0, 1);
720 IEM_MC_LOCAL(uint16_t, u16Tr);
721 IEM_MC_FETCH_TR_U16(u16Tr);
722 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
723 IEM_MC_ADVANCE_RIP();
724 IEM_MC_END();
725 break;
726
727 case IEMMODE_32BIT:
728 IEM_MC_BEGIN(0, 1);
729 IEM_MC_LOCAL(uint32_t, u32Tr);
730 IEM_MC_FETCH_TR_U32(u32Tr);
731 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
732 IEM_MC_ADVANCE_RIP();
733 IEM_MC_END();
734 break;
735
736 case IEMMODE_64BIT:
737 IEM_MC_BEGIN(0, 1);
738 IEM_MC_LOCAL(uint64_t, u64Tr);
739 IEM_MC_FETCH_TR_U64(u64Tr);
740 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
741 IEM_MC_ADVANCE_RIP();
742 IEM_MC_END();
743 break;
744
745 IEM_NOT_REACHED_DEFAULT_CASE_RET();
746 }
747 }
748 else
749 {
750 IEM_MC_BEGIN(0, 2);
751 IEM_MC_LOCAL(uint16_t, u16Tr);
752 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
753 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
754 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
755 IEM_MC_FETCH_TR_U16(u16Tr);
756 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
757 IEM_MC_ADVANCE_RIP();
758 IEM_MC_END();
759 }
760 return VINF_SUCCESS;
761}
762
763
764/** Opcode 0x0f 0x00 /2. */
765FNIEMOPRM_DEF(iemOp_Grp6_lldt)
766{
767 IEMOP_MNEMONIC(lldt, "lldt Ew");
768 IEMOP_HLP_MIN_286();
769 IEMOP_HLP_NO_REAL_OR_V86_MODE();
770
771 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
772 {
773 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
774 IEM_MC_BEGIN(1, 0);
775 IEM_MC_ARG(uint16_t, u16Sel, 0);
776 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
777 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
778 IEM_MC_END();
779 }
780 else
781 {
782 IEM_MC_BEGIN(1, 1);
783 IEM_MC_ARG(uint16_t, u16Sel, 0);
784 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
785 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
786 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
787 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
788 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
789 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
790 IEM_MC_END();
791 }
792 return VINF_SUCCESS;
793}
794
795
796/** Opcode 0x0f 0x00 /3. */
797FNIEMOPRM_DEF(iemOp_Grp6_ltr)
798{
799 IEMOP_MNEMONIC(ltr, "ltr Ew");
800 IEMOP_HLP_MIN_286();
801 IEMOP_HLP_NO_REAL_OR_V86_MODE();
802
803 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
804 {
805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
806 IEM_MC_BEGIN(1, 0);
807 IEM_MC_ARG(uint16_t, u16Sel, 0);
808 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
809 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
810 IEM_MC_END();
811 }
812 else
813 {
814 IEM_MC_BEGIN(1, 1);
815 IEM_MC_ARG(uint16_t, u16Sel, 0);
816 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
817 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
819 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
820 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
821 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
822 IEM_MC_END();
823 }
824 return VINF_SUCCESS;
825}
826
827
828/** Common worker for opcode 0x0f 0x00 /4 (verr) and /5 (verw). */
829FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
830{
831 IEMOP_HLP_MIN_286();
832 IEMOP_HLP_NO_REAL_OR_V86_MODE();
833
834 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
835 {
836 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
837 IEM_MC_BEGIN(2, 0);
838 IEM_MC_ARG(uint16_t, u16Sel, 0);
839 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
840 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
841 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
842 IEM_MC_END();
843 }
844 else
845 {
846 IEM_MC_BEGIN(2, 1);
847 IEM_MC_ARG(uint16_t, u16Sel, 0);
848 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
849 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
850 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
851 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
852 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
853 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
854 IEM_MC_END();
855 }
856 return VINF_SUCCESS;
857}
858
859
860/** Opcode 0x0f 0x00 /4. */
861FNIEMOPRM_DEF(iemOp_Grp6_verr)
862{
863 IEMOP_MNEMONIC(verr, "verr Ew");
864 IEMOP_HLP_MIN_286();
865 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
866}
867
868
869/** Opcode 0x0f 0x00 /5. */
870FNIEMOPRM_DEF(iemOp_Grp6_verw)
871{
872 IEMOP_MNEMONIC(verw, "verw Ew");
873 IEMOP_HLP_MIN_286();
874 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
875}
876
877
878/**
879 * Group 6 jump table.
880 */
881IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
882{
883 iemOp_Grp6_sldt,
884 iemOp_Grp6_str,
885 iemOp_Grp6_lldt,
886 iemOp_Grp6_ltr,
887 iemOp_Grp6_verr,
888 iemOp_Grp6_verw,
889 iemOp_InvalidWithRM,
890 iemOp_InvalidWithRM
891};
892
893/** Opcode 0x0f 0x00. */
894FNIEMOP_DEF(iemOp_Grp6)
895{
896 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
897 return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
898}
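/*
 * Dispatch example: for the bytes 0f 00 d0 (lldt ax), bRm = 0xd0 =
 * 11 010 000b, so the reg field (2) indexes g_apfnGroup6 to
 * iemOp_Grp6_lldt, which then takes its register form with rm = 0 (AX).
 */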
899
900
901/** Opcode 0x0f 0x01 /0. */
902FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
903{
904 IEMOP_MNEMONIC(sgdt, "sgdt Ms");
905 IEMOP_HLP_MIN_286();
906 IEMOP_HLP_64BIT_OP_SIZE();
907 IEM_MC_BEGIN(2, 1);
908 IEM_MC_ARG(uint8_t, iEffSeg, 0);
909 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
910 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
912 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
913 IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
914 IEM_MC_END();
915 return VINF_SUCCESS;
916}
917
918
919/** Opcode 0x0f 0x01 /0. */
920FNIEMOP_DEF(iemOp_Grp7_vmcall)
921{
922 IEMOP_BITCH_ABOUT_STUB();
923 return IEMOP_RAISE_INVALID_OPCODE();
924}
925
926
927/** Opcode 0x0f 0x01 /0. */
928FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
929{
930 IEMOP_BITCH_ABOUT_STUB();
931 return IEMOP_RAISE_INVALID_OPCODE();
932}
933
934
935/** Opcode 0x0f 0x01 /0. */
936FNIEMOP_DEF(iemOp_Grp7_vmresume)
937{
938 IEMOP_BITCH_ABOUT_STUB();
939 return IEMOP_RAISE_INVALID_OPCODE();
940}
941
942
943/** Opcode 0x0f 0x01 /0. */
944FNIEMOP_DEF(iemOp_Grp7_vmxoff)
945{
946 IEMOP_BITCH_ABOUT_STUB();
947 return IEMOP_RAISE_INVALID_OPCODE();
948}
949
950
951/** Opcode 0x0f 0x01 /1. */
952FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
953{
954 IEMOP_MNEMONIC(sidt, "sidt Ms");
955 IEMOP_HLP_MIN_286();
956 IEMOP_HLP_64BIT_OP_SIZE();
957 IEM_MC_BEGIN(2, 1);
958 IEM_MC_ARG(uint8_t, iEffSeg, 0);
959 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
960 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
962 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
963 IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
964 IEM_MC_END();
965 return VINF_SUCCESS;
966}
967
968
969/** Opcode 0x0f 0x01 /1. */
970FNIEMOP_DEF(iemOp_Grp7_monitor)
971{
972 IEMOP_MNEMONIC(monitor, "monitor");
973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
974 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
975}
976
977
978/** Opcode 0x0f 0x01 /1. */
979FNIEMOP_DEF(iemOp_Grp7_mwait)
980{
981 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
982 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
983 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
984}
985
986
987/** Opcode 0x0f 0x01 /2. */
988FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
989{
990 IEMOP_MNEMONIC(lgdt, "lgdt");
991 IEMOP_HLP_64BIT_OP_SIZE();
992 IEM_MC_BEGIN(3, 1);
993 IEM_MC_ARG(uint8_t, iEffSeg, 0);
994 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
995 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
996 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
998 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
999 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
1000 IEM_MC_END();
1001 return VINF_SUCCESS;
1002}
1003
1004
1005/** Opcode 0x0f 0x01 0xd0. */
1006FNIEMOP_DEF(iemOp_Grp7_xgetbv)
1007{
1008 IEMOP_MNEMONIC(xgetbv, "xgetbv");
1009 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
1010 {
1011 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1012 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
1013 }
1014 return IEMOP_RAISE_INVALID_OPCODE();
1015}
1016
1017
1018/** Opcode 0x0f 0x01 0xd1. */
1019FNIEMOP_DEF(iemOp_Grp7_xsetbv)
1020{
1021 IEMOP_MNEMONIC(xsetbv, "xsetbv");
1022 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
1023 {
1024 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1025 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
1026 }
1027 return IEMOP_RAISE_INVALID_OPCODE();
1028}
1029
1030
1031/** Opcode 0x0f 0x01 /3. */
1032FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
1033{
1034 IEMOP_MNEMONIC(lidt, "lidt");
1035 IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
1036 ? IEMMODE_64BIT
1037 : pVCpu->iem.s.enmEffOpSize;
1038 IEM_MC_BEGIN(3, 1);
1039 IEM_MC_ARG(uint8_t, iEffSeg, 0);
1040 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
1041 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
1042 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1044 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1045 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
1046 IEM_MC_END();
1047 return VINF_SUCCESS;
1048}
1049
1050
1051/** Opcode 0x0f 0x01 0xd8. */
1052FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
1053
1054/** Opcode 0x0f 0x01 0xd9. */
1055FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);
1056
1057/** Opcode 0x0f 0x01 0xda. */
1058FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
1059
1060/** Opcode 0x0f 0x01 0xdb. */
1061FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
1062
1063/** Opcode 0x0f 0x01 0xdc. */
1064FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
1065
1066/** Opcode 0x0f 0x01 0xdd. */
1067FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
1068
1069/** Opcode 0x0f 0x01 0xde. */
1070FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
1071
1072/** Opcode 0x0f 0x01 0xdf. */
1073FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
1074
1075/** Opcode 0x0f 0x01 /4. */
1076FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
1077{
1078 IEMOP_MNEMONIC(smsw, "smsw");
1079 IEMOP_HLP_MIN_286();
1080 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1081 {
1082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1083 switch (pVCpu->iem.s.enmEffOpSize)
1084 {
1085 case IEMMODE_16BIT:
1086 IEM_MC_BEGIN(0, 1);
1087 IEM_MC_LOCAL(uint16_t, u16Tmp);
1088 IEM_MC_FETCH_CR0_U16(u16Tmp);
1089 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
1090 { /* likely */ }
1091 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
1092 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1093 else
1094 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1095 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
1096 IEM_MC_ADVANCE_RIP();
1097 IEM_MC_END();
1098 return VINF_SUCCESS;
1099
1100 case IEMMODE_32BIT:
1101 IEM_MC_BEGIN(0, 1);
1102 IEM_MC_LOCAL(uint32_t, u32Tmp);
1103 IEM_MC_FETCH_CR0_U32(u32Tmp);
1104 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
1105 IEM_MC_ADVANCE_RIP();
1106 IEM_MC_END();
1107 return VINF_SUCCESS;
1108
1109 case IEMMODE_64BIT:
1110 IEM_MC_BEGIN(0, 1);
1111 IEM_MC_LOCAL(uint64_t, u64Tmp);
1112 IEM_MC_FETCH_CR0_U64(u64Tmp);
1113 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
1114 IEM_MC_ADVANCE_RIP();
1115 IEM_MC_END();
1116 return VINF_SUCCESS;
1117
1118 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1119 }
1120 }
1121 else
1122 {
1123 /* Ignore operand size here, memory refs are always 16-bit. */
1124 IEM_MC_BEGIN(0, 2);
1125 IEM_MC_LOCAL(uint16_t, u16Tmp);
1126 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1127 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1129 IEM_MC_FETCH_CR0_U16(u16Tmp);
1130 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
1131 { /* likely */ }
1132 else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
1133 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1134 else
1135 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1136 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
1137 IEM_MC_ADVANCE_RIP();
1138 IEM_MC_END();
1139 return VINF_SUCCESS;
1140 }
1141}
1142
1143
1144/** Opcode 0x0f 0x01 /6. */
1145FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
1146{
1147 /* The operand size is effectively ignored, all is 16-bit and only the
1148 lower four bits (PE, MP, EM, TS) are used. */
1149 IEMOP_MNEMONIC(lmsw, "lmsw");
1150 IEMOP_HLP_MIN_286();
1151 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1152 {
1153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1154 IEM_MC_BEGIN(1, 0);
1155 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1156 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1157 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
1158 IEM_MC_END();
1159 }
1160 else
1161 {
1162 IEM_MC_BEGIN(1, 1);
1163 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1164 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1165 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1167 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
1168 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
1169 IEM_MC_END();
1170 }
1171 return VINF_SUCCESS;
1172}
1173
1174
1175/** Opcode 0x0f 0x01 /7. */
1176FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
1177{
1178 IEMOP_MNEMONIC(invlpg, "invlpg");
1179 IEMOP_HLP_MIN_486();
1180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1181 IEM_MC_BEGIN(1, 1);
1182 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
1183 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1184 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
1185 IEM_MC_END();
1186 return VINF_SUCCESS;
1187}
1188
1189
1190/** Opcode 0x0f 0x01 /7. */
1191FNIEMOP_DEF(iemOp_Grp7_swapgs)
1192{
1193 IEMOP_MNEMONIC(swapgs, "swapgs");
1194 IEMOP_HLP_ONLY_64BIT();
1195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1196 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
1197}
1198
1199
1200/** Opcode 0x0f 0x01 /7. */
1201FNIEMOP_DEF(iemOp_Grp7_rdtscp)
1202{
1203 NOREF(pVCpu);
1204 IEMOP_BITCH_ABOUT_STUB();
1205 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
1206}
1207
1208
1209/** Opcode 0x0f 0x01. */
1210FNIEMOP_DEF(iemOp_Grp7)
1211{
1212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1213 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1214 {
1215 case 0:
1216 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1217 return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
1218 switch (bRm & X86_MODRM_RM_MASK)
1219 {
1220 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1221 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1222 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1223 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1224 }
1225 return IEMOP_RAISE_INVALID_OPCODE();
1226
1227 case 1:
1228 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1229 return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
1230 switch (bRm & X86_MODRM_RM_MASK)
1231 {
1232 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1233 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1234 }
1235 return IEMOP_RAISE_INVALID_OPCODE();
1236
1237 case 2:
1238 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1239 return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
1240 switch (bRm & X86_MODRM_RM_MASK)
1241 {
1242 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1243 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1244 }
1245 return IEMOP_RAISE_INVALID_OPCODE();
1246
1247 case 3:
1248 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1249 return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
1250 switch (bRm & X86_MODRM_RM_MASK)
1251 {
1252 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1253 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1254 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1255 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1256 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1257 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1258 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1259 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1260 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1261 }
1262
1263 case 4:
1264 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1265
1266 case 5:
1267 return IEMOP_RAISE_INVALID_OPCODE();
1268
1269 case 6:
1270 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1271
1272 case 7:
1273 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1274 return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
1275 switch (bRm & X86_MODRM_RM_MASK)
1276 {
1277 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1278 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1279 }
1280 return IEMOP_RAISE_INVALID_OPCODE();
1281
1282 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1283 }
1284}
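/*
 * Group 7 decoding note: for /0../3 and /7 the register (mod=11) forms are
 * re-purposed for unrelated instructions selected by the rm field (the VMX
 * calls, monitor/mwait, xgetbv/xsetbv, the AMD SVM set and swapgs/rdtscp),
 * while the memory forms keep the classic descriptor-table and paging
 * instructions.
 */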
1285
1286/** Common worker for opcodes 0x0f 0x02 (lar) and 0x0f 0x03 (lsl). */
1287FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1288{
1289 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1290 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1291
1292 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1293 {
1294 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1295 switch (pVCpu->iem.s.enmEffOpSize)
1296 {
1297 case IEMMODE_16BIT:
1298 {
1299 IEM_MC_BEGIN(3, 0);
1300 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1301 IEM_MC_ARG(uint16_t, u16Sel, 1);
1302 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1303
1304 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1305 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1306 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1307
1308 IEM_MC_END();
1309 return VINF_SUCCESS;
1310 }
1311
1312 case IEMMODE_32BIT:
1313 case IEMMODE_64BIT:
1314 {
1315 IEM_MC_BEGIN(3, 0);
1316 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1317 IEM_MC_ARG(uint16_t, u16Sel, 1);
1318 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1319
1320 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1321 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1322 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1323
1324 IEM_MC_END();
1325 return VINF_SUCCESS;
1326 }
1327
1328 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1329 }
1330 }
1331 else
1332 {
1333 switch (pVCpu->iem.s.enmEffOpSize)
1334 {
1335 case IEMMODE_16BIT:
1336 {
1337 IEM_MC_BEGIN(3, 1);
1338 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1339 IEM_MC_ARG(uint16_t, u16Sel, 1);
1340 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1341 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1342
1343 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1344 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1345
1346 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1347 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1348 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1349
1350 IEM_MC_END();
1351 return VINF_SUCCESS;
1352 }
1353
1354 case IEMMODE_32BIT:
1355 case IEMMODE_64BIT:
1356 {
1357 IEM_MC_BEGIN(3, 1);
1358 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1359 IEM_MC_ARG(uint16_t, u16Sel, 1);
1360 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1361 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1362
1363 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1364 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1365/** @todo testcase: make sure it's a 16-bit read. */
1366
1367 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1368 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1369 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1370
1371 IEM_MC_END();
1372 return VINF_SUCCESS;
1373 }
1374
1375 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1376 }
1377 }
1378}
1379
1380
1381
1382/** Opcode 0x0f 0x02. */
1383FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1384{
1385 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1386 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1387}
1388
1389
1390/** Opcode 0x0f 0x03. */
1391FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1392{
1393 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1394 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1395}
1396
1397
1398/** Opcode 0x0f 0x05. */
1399FNIEMOP_DEF(iemOp_syscall)
1400{
1401 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1402 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1403 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
1404}
1405
1406
1407/** Opcode 0x0f 0x06. */
1408FNIEMOP_DEF(iemOp_clts)
1409{
1410 IEMOP_MNEMONIC(clts, "clts");
1411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1412 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
1413}
1414
1415
1416/** Opcode 0x0f 0x07. */
1417FNIEMOP_DEF(iemOp_sysret)
1418{
1419 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1421 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
1422}
1423
1424
1425/** Opcode 0x0f 0x08. */
1426FNIEMOP_STUB(iemOp_invd);
1427// IEMOP_HLP_MIN_486();
1428
1429
1430/** Opcode 0x0f 0x09. */
1431FNIEMOP_DEF(iemOp_wbinvd)
1432{
1433 IEMOP_MNEMONIC(wbinvd, "wbinvd");
1434 IEMOP_HLP_MIN_486();
1435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1436 IEM_MC_BEGIN(0, 0);
1437 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
1438 IEM_MC_ADVANCE_RIP();
1439 IEM_MC_END();
1440 return VINF_SUCCESS; /* ignore for now */
1441}
1442
1443
1444/** Opcode 0x0f 0x0b. */
1445FNIEMOP_DEF(iemOp_ud2)
1446{
1447 IEMOP_MNEMONIC(ud2, "ud2");
1448 return IEMOP_RAISE_INVALID_OPCODE();
1449}
1450
1451/** Opcode 0x0f 0x0d. */
1452FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
1453{
1454 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
1455 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
1456 {
1457 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
1458 return IEMOP_RAISE_INVALID_OPCODE();
1459 }
1460
1461 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1462 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1463 {
1464 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
1465 return IEMOP_RAISE_INVALID_OPCODE();
1466 }
1467
1468 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1469 {
1470 case 2: /* Aliased to /0 for the time being. */
1471 case 4: /* Aliased to /0 for the time being. */
1472 case 5: /* Aliased to /0 for the time being. */
1473 case 6: /* Aliased to /0 for the time being. */
1474 case 7: /* Aliased to /0 for the time being. */
1475 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
1476 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
1477 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
1478 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1479 }
1480
1481 IEM_MC_BEGIN(0, 1);
1482 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1483 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1485 /* Currently a NOP. */
1486 NOREF(GCPtrEffSrc);
1487 IEM_MC_ADVANCE_RIP();
1488 IEM_MC_END();
1489 return VINF_SUCCESS;
1490}
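/*
 * The prefetch hints are architecturally allowed to have no effect, so
 * emulating them as NOPs that still decode the ModR/M operand (the
 * effective address is calculated above but never dereferenced) is a
 * valid implementation.
 */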
1491
1492
1493/** Opcode 0x0f 0x0e. */
1494FNIEMOP_STUB(iemOp_femms);
1495
1496
1497/** Opcode 0x0f 0x0f 0x0c. */
1498FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);
1499
1500/** Opcode 0x0f 0x0f 0x0d. */
1501FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);
1502
1503/** Opcode 0x0f 0x0f 0x1c. */
1504FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);
1505
1506/** Opcode 0x0f 0x0f 0x1d. */
1507FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);
1508
1509/** Opcode 0x0f 0x0f 0x8a. */
1510FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);
1511
1512/** Opcode 0x0f 0x0f 0x8e. */
1513FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);
1514
1515/** Opcode 0x0f 0x0f 0x90. */
1516FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);
1517
1518/** Opcode 0x0f 0x0f 0x94. */
1519FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);
1520
1521/** Opcode 0x0f 0x0f 0x96. */
1522FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);
1523
1524/** Opcode 0x0f 0x0f 0x97. */
1525FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);
1526
1527/** Opcode 0x0f 0x0f 0x9a. */
1528FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);
1529
1530/** Opcode 0x0f 0x0f 0x9e. */
1531FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);
1532
1533/** Opcode 0x0f 0x0f 0xa0. */
1534FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);
1535
1536/** Opcode 0x0f 0x0f 0xa4. */
1537FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);
1538
1539/** Opcode 0x0f 0x0f 0xa6. */
1540FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);
1541
1542/** Opcode 0x0f 0x0f 0xa7. */
1543FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);
1544
1545/** Opcode 0x0f 0x0f 0xaa. */
1546FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);
1547
1548/** Opcode 0x0f 0x0f 0xae. */
1549FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);
1550
1551/** Opcode 0x0f 0x0f 0xb0. */
1552FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);
1553
1554/** Opcode 0x0f 0x0f 0xb4. */
1555FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);
1556
1557/** Opcode 0x0f 0x0f 0xb6. */
1558FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);
1559
1560/** Opcode 0x0f 0x0f 0xb7. */
1561FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);
1562
1563/** Opcode 0x0f 0x0f 0xbb. */
1564FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);
1565
1566/** Opcode 0x0f 0x0f 0xbf. */
1567FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1568
1569
1570/** Opcode 0x0f 0x0f. */
1571FNIEMOP_DEF(iemOp_3Dnow)
1572{
1573 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
1574 {
1575 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
1576 return IEMOP_RAISE_INVALID_OPCODE();
1577 }
1578
1579 /* This is pretty sparse, use switch instead of table. */
1580 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1581 switch (b)
1582 {
1583 case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
1584 case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
1585 case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
1586 case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
1587 case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
1588 case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
1589 case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
1590 case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
1591 case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
1592 case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
1593 case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
1594 case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
1595 case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
1596 case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
1597 case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
1598 case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
1599 case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
1600 case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
1601 case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
1602 case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
1603 case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
1604 case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
1605 case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
1606 case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
1607 default:
1608 return IEMOP_RAISE_INVALID_OPCODE();
1609 }
1610}
1611
1612
1613/** Opcode 0x0f 0x10. */
1614FNIEMOP_STUB(iemOp_movups_Vps_Wps);
1615
1616/** Opcode 66h 0x0f 0x10. */
1617FNIEMOP_STUB(iemOp_movupd_Vpd_Wpd);
1618
1619/** Opcode f3h 0x0f 0x10. */
1620FNIEMOP_STUB(iemOp_movss_Vss_Hx_Wss);
1621
1622/** Opcode f2h 0x0f 0x10. */
1623FNIEMOP_STUB(iemOp_movsd_Vsd_Hx_Wsd);
1624
1625
1626/** Opcode 0x0f 0x11. */
1627FNIEMOP_DEF(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd)
1628{
1629 /* Quick hack. Need to restructure all of this later some time. */
1630 uint32_t const fRelevantPrefix = pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ);
1631 if (fRelevantPrefix == 0)
1632 {
1633 IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
1634 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1635 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1636 {
1637 /*
1638 * Register, register.
1639 */
1640 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1641 IEM_MC_BEGIN(0, 0);
1642 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1643 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1644 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1645 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1646 IEM_MC_ADVANCE_RIP();
1647 IEM_MC_END();
1648 }
1649 else
1650 {
1651 /*
1652 * Memory, register.
1653 */
1654 IEM_MC_BEGIN(0, 2);
1655 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1656 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1657
1658 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1659 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
1660 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1661 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1662
1663 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1664 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1665
1666 IEM_MC_ADVANCE_RIP();
1667 IEM_MC_END();
1668 }
1669 }
1670 else if (fRelevantPrefix == IEM_OP_PRF_REPNZ)
1671 {
1672 IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
1673 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1674 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1675 {
1676 /*
1677 * Register, register.
1678 */
1679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1680 IEM_MC_BEGIN(0, 1);
1681 IEM_MC_LOCAL(uint64_t, uSrc);
1682
1683 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1684 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1685 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1686 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1687
1688 IEM_MC_ADVANCE_RIP();
1689 IEM_MC_END();
1690 }
1691 else
1692 {
1693 /*
1694 * Memory, register.
1695 */
1696 IEM_MC_BEGIN(0, 2);
1697 IEM_MC_LOCAL(uint64_t, uSrc);
1698 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1699
1700 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1702 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1703 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1704
1705 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1706 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1707
1708 IEM_MC_ADVANCE_RIP();
1709 IEM_MC_END();
1710 }
1711 }
1712 else
1713 {
1714 IEMOP_BITCH_ABOUT_STUB();
1715 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
1716 }
1717 return VINF_SUCCESS;
1718}
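/*
 * SSE decoding note: for 0x0f 0x10/0x11 the 0x66, 0xf3 and 0xf2 prefixes
 * act as mandatory prefixes selecting movupd/movss/movsd rather than as
 * plain operand-size/REP prefixes, which is why the handler above switches
 * on fPrefixes instead of going through separate opcode tables.
 */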
1719
1720
1721/** Opcode 0x0f 0x12. */
1722FNIEMOP_STUB(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps); //NEXT
1723
1724/** Opcode 0x66 0x0f 0x12. */
1725FNIEMOP_STUB(iemOp_vmovlpd_Vq_Hq_Mq); //NEXT
1726
1727/** Opcode 0xf3 0x0f 0x12. */
1728FNIEMOP_STUB(iemOp_vmovsldup_Vx_Wx); //NEXT
1729
1730/** Opcode 0xf2 0x0f 0x12. */
1731FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx); //NEXT
1732
1733
1734/** Opcode 0x0f 0x13. */
1735FNIEMOP_DEF(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq)
1736{
1737 /* Quick hack. Need to restructure all of this later some time. */
1738 if (pVCpu->iem.s.fPrefixes == IEM_OP_PRF_SIZE_OP)
1739 {
1740 IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
1741 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1742 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1743 {
1744#if 0
1745 /*
1746 * Register, register.
1747 */
1748 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1749 IEM_MC_BEGIN(0, 1);
1750 IEM_MC_LOCAL(uint64_t, uSrc);
1751 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1752 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1753 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1754 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1755 IEM_MC_ADVANCE_RIP();
1756 IEM_MC_END();
1757#else
1758 return IEMOP_RAISE_INVALID_OPCODE();
1759#endif
1760 }
1761 else
1762 {
1763 /*
1764 * Memory, register.
1765 */
1766 IEM_MC_BEGIN(0, 2);
1767 IEM_MC_LOCAL(uint64_t, uSrc);
1768 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1769
1770 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1771 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
1772 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1773 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1774
1775 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1776 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1777
1778 IEM_MC_ADVANCE_RIP();
1779 IEM_MC_END();
1780 }
1781 return VINF_SUCCESS;
1782 }
1783
1784 IEMOP_BITCH_ABOUT_STUB();
1785 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
1786}
1787
1788
1789/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx*/
1790FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
1791/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
1792FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
1793/* Opcode 0xf3 0x0f 0x14 - invalid */
1794/* Opcode 0xf2 0x0f 0x14 - invalid */
1795/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
1796FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
1797/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
1798FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
1799/* Opcode 0xf3 0x0f 0x15 - invalid */
1800/* Opcode 0xf2 0x0f 0x15 - invalid */
1801/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq vmovlhps Vdq, Hq, Uq */
1802FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
1803/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
1804FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
1805/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
1806FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
1807/* Opcode 0xf2 0x0f 0x16 - invalid */
1808/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
1809FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
1810/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
1811FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
1812/* Opcode 0xf3 0x0f 0x17 - invalid */
1813/* Opcode 0xf2 0x0f 0x17 - invalid */
1814
1815
1816/** Opcode 0x0f 0x18. */
1817FNIEMOP_DEF(iemOp_prefetch_Grp16)
1818{
1819 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1820 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1821 {
1822 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1823 {
1824 case 4: /* Aliased to /0 for the time being according to AMD. */
1825 case 5: /* Aliased to /0 for the time being according to AMD. */
1826 case 6: /* Aliased to /0 for the time being according to AMD. */
1827 case 7: /* Aliased to /0 for the time being according to AMD. */
1828 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
1829 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
1830 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
1831 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
1832 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1833 }
1834
1835 IEM_MC_BEGIN(0, 1);
1836 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1837 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1839 /* Currently a NOP. */
1840 NOREF(GCPtrEffSrc);
1841 IEM_MC_ADVANCE_RIP();
1842 IEM_MC_END();
1843 return VINF_SUCCESS;
1844 }
1845
1846 return IEMOP_RAISE_INVALID_OPCODE();
1847}
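
/*
 * Decode example (illustrative): 0F 18 06 is mod=0/reg=0/rm=6, i.e.
 * prefetchNTA byte ptr [esi] with 32-bit addressing, while 0F 18 0E
 * (reg=1) would be prefetchT0.  As the hints have no architectural
 * effect, treating them as NOPs above is safe.
 */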
1848
1849
1850/** Opcode 0x0f 0x19..0x1f. */
1851FNIEMOP_DEF(iemOp_nop_Ev)
1852{
1853 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
1854 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1855 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1856 {
1857 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1858 IEM_MC_BEGIN(0, 0);
1859 IEM_MC_ADVANCE_RIP();
1860 IEM_MC_END();
1861 }
1862 else
1863 {
1864 IEM_MC_BEGIN(0, 1);
1865 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1866 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1868 /* Currently a NOP. */
1869 NOREF(GCPtrEffSrc);
1870 IEM_MC_ADVANCE_RIP();
1871 IEM_MC_END();
1872 }
1873 return VINF_SUCCESS;
1874}
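
/*
 * This handler is what makes the recommended long NOPs tick, e.g. the
 * 5-byte 0F 1F 44 00 00 (nop dword [eax+eax+0]): the ModRM/SIB/disp
 * bytes are decoded so the instruction length comes out right, but no
 * memory is touched.
 */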
1875
1876
1877/** Opcode 0x0f 0x20. */
1878FNIEMOP_DEF(iemOp_mov_Rd_Cd)
1879{
1880 /* mod is ignored, as are operand-size overrides. */
1881 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
1882 IEMOP_HLP_MIN_386();
1883 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1884 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1885 else
1886 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1887
1888 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1889 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1890 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1891 {
1892 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1893 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1894 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1895 iCrReg |= 8;
1896 }
1897 switch (iCrReg)
1898 {
1899 case 0: case 2: case 3: case 4: case 8:
1900 break;
1901 default:
1902 return IEMOP_RAISE_INVALID_OPCODE();
1903 }
1904 IEMOP_HLP_DONE_DECODING();
1905
1906 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
1907}
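
/*
 * The LOCK handling above implements what AMD documents as AltMovCr8:
 * in 32-bit mode F0 0F 20 C0 reads CR8 into EAX on CPUs advertising the
 * feature (tracked by fMovCr8In32Bit here), while everything else gets
 * the #UD.
 */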
1908
1909
1910/** Opcode 0x0f 0x21. */
1911FNIEMOP_DEF(iemOp_mov_Rd_Dd)
1912{
1913 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
1914 IEMOP_HLP_MIN_386();
1915 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1917 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1918 return IEMOP_RAISE_INVALID_OPCODE();
1919 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
1920 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
1921 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
1922}
1923
1924
1925/** Opcode 0x0f 0x22. */
1926FNIEMOP_DEF(iemOp_mov_Cd_Rd)
1927{
1928 /* mod is ignored, as are operand-size overrides. */
1929 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
1930 IEMOP_HLP_MIN_386();
1931 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1932 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1933 else
1934 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1935
1936 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1937 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1938 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1939 {
1940 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1941 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1942 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1943 iCrReg |= 8;
1944 }
1945 switch (iCrReg)
1946 {
1947 case 0: case 2: case 3: case 4: case 8:
1948 break;
1949 default:
1950 return IEMOP_RAISE_INVALID_OPCODE();
1951 }
1952 IEMOP_HLP_DONE_DECODING();
1953
1954 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1955}
1956
1957
1958/** Opcode 0x0f 0x23. */
1959FNIEMOP_DEF(iemOp_mov_Dd_Rd)
1960{
1961 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
1962 IEMOP_HLP_MIN_386();
1963 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1965 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1966 return IEMOP_RAISE_INVALID_OPCODE();
1967 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
1968 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
1969 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1970}
1971
1972
1973/** Opcode 0x0f 0x24. */
1974FNIEMOP_DEF(iemOp_mov_Rd_Td)
1975{
1976 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
1977 /** @todo works on 386 and 486. */
1978 /* The RM byte is not considered, see testcase. */
1979 return IEMOP_RAISE_INVALID_OPCODE();
1980}
1981
1982
1983/** Opcode 0x0f 0x26. */
1984FNIEMOP_DEF(iemOp_mov_Td_Rd)
1985{
1986 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
1987 /** @todo works on 386 and 486. */
1988 /* The RM byte is not considered, see testcase. */
1989 return IEMOP_RAISE_INVALID_OPCODE();
1990}
1991
1992
1993/** Opcode 0x0f 0x28. */
1994FNIEMOP_DEF(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd)
1995{
1996 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1997 IEMOP_MNEMONIC(movaps_r_mr, "movaps Vps,Wps");
1998 else
1999 IEMOP_MNEMONIC(movapd_r_mr, "movapd Vpd,Wpd");
2000 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2001 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2002 {
2003 /*
2004 * Register, register.
2005 */
2006 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
2007 IEM_MC_BEGIN(0, 0);
2008 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2009 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2010 else
2011 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2012 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2013 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2014 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2015 IEM_MC_ADVANCE_RIP();
2016 IEM_MC_END();
2017 }
2018 else
2019 {
2020 /*
2021 * Register, memory.
2022 */
2023 IEM_MC_BEGIN(0, 2);
2024 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
2025 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2026
2027 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2028 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
2029 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2030 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2031 else
2032 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2033 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2034
2035 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2036 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2037
2038 IEM_MC_ADVANCE_RIP();
2039 IEM_MC_END();
2040 }
2041 return VINF_SUCCESS;
2042}
2043
2044
2045/** Opcode 0x0f 0x29. */
2046FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
2047{
2048 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2049 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
2050 else
2051 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
2052 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2053 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2054 {
2055 /*
2056 * Register, register.
2057 */
2058 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
2059 IEM_MC_BEGIN(0, 0);
2060 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2061 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2062 else
2063 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2064 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2065 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2066 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2067 IEM_MC_ADVANCE_RIP();
2068 IEM_MC_END();
2069 }
2070 else
2071 {
2072 /*
2073 * Memory, register.
2074 */
2075 IEM_MC_BEGIN(0, 2);
2076 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
2077 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2078
2079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2080 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
2081 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2082 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2083 else
2084 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2085 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2086
2087 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2088 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2089
2090 IEM_MC_ADVANCE_RIP();
2091 IEM_MC_END();
2092 }
2093 return VINF_SUCCESS;
2094}
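
/*
 * Both the 0x28 load and the 0x29 store use the ALIGN_SSE accessors, so a
 * memory operand that is not 16-byte aligned should raise #GP(0) just like
 * movaps/movapd on real hardware; the movups/movupd encodings are the ones
 * expected to go through the unaligned accessors.
 */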
2095
2096
2097/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2098FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2099/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2100FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2101/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
2102FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
2103/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2104FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT
2105
2106
2107/** Opcode 0x0f 0x2b. */
2108FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
2109{
2110 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2111 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
2112 else
2113 IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mpd,Vpd");
2114 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2115 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2116 {
2117 /*
2118 * Memory, register.
2119 */
2120 IEM_MC_BEGIN(0, 2);
2121 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
2122 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2123
2124 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2125 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
2126 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2127 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2128 else
2129 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2130 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2131
2132 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2133 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2134
2135 IEM_MC_ADVANCE_RIP();
2136 IEM_MC_END();
2137 }
2138 /* The register, register encoding is invalid. */
2139 else
2140 return IEMOP_RAISE_INVALID_OPCODE();
2141 return VINF_SUCCESS;
2142}
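
/*
 * The non-temporal hint carries no architectural state, so implementing
 * movntps/movntpd as plain aligned 128-bit stores like above is correct;
 * only the cache-bypassing behaviour of real hardware goes unmodelled.
 */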
2143
2144
2145/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2146FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2147/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2148FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2149/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
2150FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
2151/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
2152FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
2153
2154/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2155FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2156/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Ppi, Wpd */
2157FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2158/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
2159FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
2160/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
2161FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
2162
2163/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
2164FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
2165/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
2166FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
2167/* Opcode 0xf3 0x0f 0x2e - invalid */
2168/* Opcode 0xf2 0x0f 0x2e - invalid */
2169
2170/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
2171FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
2172/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
2173FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
2174/* Opcode 0xf3 0x0f 0x2f - invalid */
2175/* Opcode 0xf2 0x0f 0x2f - invalid */
2176
2177/** Opcode 0x0f 0x30. */
2178FNIEMOP_DEF(iemOp_wrmsr)
2179{
2180 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2181 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2182 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2183}
2184
2185
2186/** Opcode 0x0f 0x31. */
2187FNIEMOP_DEF(iemOp_rdtsc)
2188{
2189 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2191 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2192}
2193
2194
2195/** Opcode 0x0f 0x32. */
2196FNIEMOP_DEF(iemOp_rdmsr)
2197{
2198 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2199 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2200 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2201}
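
/*
 * Both MSR instructions share one register convention: ECX selects the
 * MSR and the 64-bit value travels in EDX:EAX (high:low).  The CPL-0 and
 * valid-MSR checks (#GP(0) otherwise) live in the deferred iemCImpl_wrmsr
 * and iemCImpl_rdmsr workers.
 */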
2202
2203
2204/** Opcode 0x0f 0x33. */
2205FNIEMOP_STUB(iemOp_rdpmc);
2206/** Opcode 0x0f 0x34. */
2207FNIEMOP_STUB(iemOp_sysenter);
2208/** Opcode 0x0f 0x35. */
2209FNIEMOP_STUB(iemOp_sysexit);
2210/** Opcode 0x0f 0x37. */
2211FNIEMOP_STUB(iemOp_getsec);
2212/** Opcode 0x0f 0x38. */
2213FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
2214/** Opcode 0x0f 0x3a. */
2215FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
2216
2217
2218/**
2219 * Implements a conditional move.
2220 *
2221 * Wish there were an obvious way to do this where we could share code and
2222 * reduce bloat.
2223 *
2224 * @param a_Cnd The conditional "microcode" operation.
2225 */
2226#define CMOV_X(a_Cnd) \
2227 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2228 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2229 { \
2230 switch (pVCpu->iem.s.enmEffOpSize) \
2231 { \
2232 case IEMMODE_16BIT: \
2233 IEM_MC_BEGIN(0, 1); \
2234 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2235 a_Cnd { \
2236 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2237 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2238 } IEM_MC_ENDIF(); \
2239 IEM_MC_ADVANCE_RIP(); \
2240 IEM_MC_END(); \
2241 return VINF_SUCCESS; \
2242 \
2243 case IEMMODE_32BIT: \
2244 IEM_MC_BEGIN(0, 1); \
2245 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2246 a_Cnd { \
2247 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2248 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2249 } IEM_MC_ELSE() { \
2250 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2251 } IEM_MC_ENDIF(); \
2252 IEM_MC_ADVANCE_RIP(); \
2253 IEM_MC_END(); \
2254 return VINF_SUCCESS; \
2255 \
2256 case IEMMODE_64BIT: \
2257 IEM_MC_BEGIN(0, 1); \
2258 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2259 a_Cnd { \
2260 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2261 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2262 } IEM_MC_ENDIF(); \
2263 IEM_MC_ADVANCE_RIP(); \
2264 IEM_MC_END(); \
2265 return VINF_SUCCESS; \
2266 \
2267 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2268 } \
2269 } \
2270 else \
2271 { \
2272 switch (pVCpu->iem.s.enmEffOpSize) \
2273 { \
2274 case IEMMODE_16BIT: \
2275 IEM_MC_BEGIN(0, 2); \
2276 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2277 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2278 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2279 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2280 a_Cnd { \
2281 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2282 } IEM_MC_ENDIF(); \
2283 IEM_MC_ADVANCE_RIP(); \
2284 IEM_MC_END(); \
2285 return VINF_SUCCESS; \
2286 \
2287 case IEMMODE_32BIT: \
2288 IEM_MC_BEGIN(0, 2); \
2289 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2290 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2291 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2292 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2293 a_Cnd { \
2294 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2295 } IEM_MC_ELSE() { \
2296 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2297 } IEM_MC_ENDIF(); \
2298 IEM_MC_ADVANCE_RIP(); \
2299 IEM_MC_END(); \
2300 return VINF_SUCCESS; \
2301 \
2302 case IEMMODE_64BIT: \
2303 IEM_MC_BEGIN(0, 2); \
2304 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2305 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2306 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2307 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2308 a_Cnd { \
2309 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2310 } IEM_MC_ENDIF(); \
2311 IEM_MC_ADVANCE_RIP(); \
2312 IEM_MC_END(); \
2313 return VINF_SUCCESS; \
2314 \
2315 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2316 } \
2317 } do {} while (0)
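
/*
 * Note the 64-bit mode subtlety baked into the 32-bit cases above: a
 * 32-bit cmov writes its destination even when the condition is false,
 * zero-extending into bits 63:32 (hence IEM_MC_CLEAR_HIGH_GREG_U64 in the
 * else branches).  E.g. cmovne eax,ebx in 64-bit mode clears RAX[63:32]
 * even when ZF=1 and no move takes place.
 */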
2318
2319
2320
2321/** Opcode 0x0f 0x40. */
2322FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2323{
2324 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2325 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2326}
2327
2328
2329/** Opcode 0x0f 0x41. */
2330FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2331{
2332 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2333 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2334}
2335
2336
2337/** Opcode 0x0f 0x42. */
2338FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2339{
2340 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2341 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2342}
2343
2344
2345/** Opcode 0x0f 0x43. */
2346FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2347{
2348 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2349 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2350}
2351
2352
2353/** Opcode 0x0f 0x44. */
2354FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2355{
2356 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2357 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2358}
2359
2360
2361/** Opcode 0x0f 0x45. */
2362FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2363{
2364 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2365 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2366}
2367
2368
2369/** Opcode 0x0f 0x46. */
2370FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2371{
2372 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2373 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2374}
2375
2376
2377/** Opcode 0x0f 0x47. */
2378FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2379{
2380 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2381 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2382}
2383
2384
2385/** Opcode 0x0f 0x48. */
2386FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2387{
2388 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2389 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2390}
2391
2392
2393/** Opcode 0x0f 0x49. */
2394FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2395{
2396 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2397 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2398}
2399
2400
2401/** Opcode 0x0f 0x4a. */
2402FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2403{
2404 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2405 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2406}
2407
2408
2409/** Opcode 0x0f 0x4b. */
2410FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2411{
2412 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2413 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2414}
2415
2416
2417/** Opcode 0x0f 0x4c. */
2418FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2419{
2420 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2421 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2422}
2423
2424
2425/** Opcode 0x0f 0x4d. */
2426FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2427{
2428 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2429 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2430}
2431
2432
2433/** Opcode 0x0f 0x4e. */
2434FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2435{
2436 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2437 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2438}
2439
2440
2441/** Opcode 0x0f 0x4f. */
2442FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2443{
2444 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2445 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2446}
2447
2448#undef CMOV_X
2449
2450/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
2451FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
2452/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
2453FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
2454/* Opcode 0xf3 0x0f 0x50 - invalid */
2455/* Opcode 0xf2 0x0f 0x50 - invalid */
2456
2457/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
2458FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2459/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
2460FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2461/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
2462FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2463/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2464FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2465
2466/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
2467FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2468/* Opcode 0x66 0x0f 0x52 - invalid */
2469/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
2470FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2471/* Opcode 0xf2 0x0f 0x52 - invalid */
2472
2473/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
2474FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2475/* Opcode 0x66 0x0f 0x53 - invalid */
2476/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
2477FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2478/* Opcode 0xf2 0x0f 0x53 - invalid */
2479
2480/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
2481FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
2482/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
2483FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
2484/* Opcode 0xf3 0x0f 0x54 - invalid */
2485/* Opcode 0xf2 0x0f 0x54 - invalid */
2486
2487/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
2488FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
2489/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
2490FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
2491/* Opcode 0xf3 0x0f 0x55 - invalid */
2492/* Opcode 0xf2 0x0f 0x55 - invalid */
2493
2494/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
2495FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
2496/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
2497FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
2498/* Opcode 0xf3 0x0f 0x56 - invalid */
2499/* Opcode 0xf2 0x0f 0x56 - invalid */
2500
2501/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
2502FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
2503/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
2504FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
2505/* Opcode 0xf3 0x0f 0x57 - invalid */
2506/* Opcode 0xf2 0x0f 0x57 - invalid */
2507
2508/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
2509FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2510/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
2511FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2512/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
2513FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2514/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
2515FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2516
2517/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
2518FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2519/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
2520FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2521/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
2522FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2523/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
2524FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2525
2526/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
2527FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2528/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
2529FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2530/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
2531FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2532/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2533FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2534
2535/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
2536FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2537/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
2538FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2539/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
2540FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2541/* Opcode 0xf2 0x0f 0x5b - invalid */
2542
2543/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
2544FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2545/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
2546FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2547/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
2548FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2549/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
2550FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2551
2552/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
2553FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2554/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
2555FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2556/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
2557FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2558/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
2559FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2560
2561/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
2562FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2563/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
2564FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2565/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
2566FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2567/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
2568FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2569
2570/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
2571FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2572/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
2573FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2574/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
2575FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2576/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
2577FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2578
2579
2580/**
2581 * Common worker for SSE2 and MMX instructions on the forms:
2582 * pxxxx xmm1, xmm2/mem128
2583 * pxxxx mm1, mm2/mem32
2584 *
2585 * The 2nd operand is the first half of a register, which in the memory case
2586 * means a 32-bit memory access for MMX and a 128-bit aligned 64-bit or 128-bit
2587 * memory access for SSE.
2588 *
2589 * Exceptions type 4.
2590 */
2591FNIEMOP_DEF_1(iemOpCommonMmxSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2592{
2593 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2594 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2595 {
2596 case IEM_OP_PRF_SIZE_OP: /* SSE */
2597 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2598 {
2599 /*
2600 * Register, register.
2601 */
2602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2603 IEM_MC_BEGIN(2, 0);
2604 IEM_MC_ARG(uint128_t *, pDst, 0);
2605 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2606 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2607 IEM_MC_PREPARE_SSE_USAGE();
2608 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2609 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2610 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2611 IEM_MC_ADVANCE_RIP();
2612 IEM_MC_END();
2613 }
2614 else
2615 {
2616 /*
2617 * Register, memory.
2618 */
2619 IEM_MC_BEGIN(2, 2);
2620 IEM_MC_ARG(uint128_t *, pDst, 0);
2621 IEM_MC_LOCAL(uint64_t, uSrc);
2622 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2623 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2624
2625 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2627 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2628 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2629
2630 IEM_MC_PREPARE_SSE_USAGE();
2631 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2632 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2633
2634 IEM_MC_ADVANCE_RIP();
2635 IEM_MC_END();
2636 }
2637 return VINF_SUCCESS;
2638
2639 case 0: /* MMX */
2640 if (!pImpl->pfnU64)
2641 return IEMOP_RAISE_INVALID_OPCODE();
2642 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2643 {
2644 /*
2645 * Register, register.
2646 */
2647 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2648 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2650 IEM_MC_BEGIN(2, 0);
2651 IEM_MC_ARG(uint64_t *, pDst, 0);
2652 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2653 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2654 IEM_MC_PREPARE_FPU_USAGE();
2655 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2656 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2657 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2658 IEM_MC_ADVANCE_RIP();
2659 IEM_MC_END();
2660 }
2661 else
2662 {
2663 /*
2664 * Register, memory.
2665 */
2666 IEM_MC_BEGIN(2, 2);
2667 IEM_MC_ARG(uint64_t *, pDst, 0);
2668 IEM_MC_LOCAL(uint32_t, uSrc);
2669 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2670 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2671
2672 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2674 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2675 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2676
2677 IEM_MC_PREPARE_FPU_USAGE();
2678 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2679 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2680
2681 IEM_MC_ADVANCE_RIP();
2682 IEM_MC_END();
2683 }
2684 return VINF_SUCCESS;
2685
2686 default:
2687 return IEMOP_RAISE_INVALID_OPCODE();
2688 }
2689}
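
/*
 * Worked example for the low-to-full interleave (illustrative values),
 * using the MMX punpcklbw below:
 *      dst = 0x0706050403020100, src = 0x0F0E0D0C0B0A0908
 * The low dwords are interleaved byte by byte, yielding
 *      dst = 0x0B030A0209010800
 * The SSE form does the same with the low qwords of the XMM registers.
 */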
2690
2691
2692/** Opcode 0x0f 0x60. */
2693FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq)
2694{
2695 IEMOP_MNEMONIC(punpcklbw, "punpcklbw");
2696 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2697}
2698
2699
2700/** Opcode 0x0f 0x61. */
2701FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq)
2702{
2703 IEMOP_MNEMONIC(punpcklwd, "punpcklwd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
2704 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2705}
2706
2707
2708/** Opcode 0x0f 0x62. */
2709FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq)
2710{
2711 IEMOP_MNEMONIC(punpckldq, "punpckldq");
2712 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2713}
2714
2715
2716/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2717FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2718/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
2719FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
2720/* Opcode 0xf3 0x0f 0x63 - invalid */
2721
2722/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2723FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2724/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
2725FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
2726/* Opcode 0xf3 0x0f 0x64 - invalid */
2727
2728/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2729FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2730/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
2731FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
2732/* Opcode 0xf3 0x0f 0x65 - invalid */
2733
2734/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2735FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2736/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
2737FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
2738/* Opcode 0xf3 0x0f 0x66 - invalid */
2739
2740/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2741FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
2742/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, Wx */
2743FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
2744/* Opcode 0xf3 0x0f 0x67 - invalid */
2745
2746
2747/**
2748 * Common worker for SSE2 and MMX instructions on the forms:
2749 * pxxxx xmm1, xmm2/mem128
2750 * pxxxx mm1, mm2/mem64
2751 *
2752 * The 2nd operand is the second half of a register, which in the memory case
2753 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2754 * where it may read the full 128 bits or only the upper 64 bits.
2755 *
2756 * Exceptions type 4.
2757 */
2758FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2759{
2760 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2761 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2762 {
2763 case IEM_OP_PRF_SIZE_OP: /* SSE */
2764 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2765 {
2766 /*
2767 * Register, register.
2768 */
2769 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2770 IEM_MC_BEGIN(2, 0);
2771 IEM_MC_ARG(uint128_t *, pDst, 0);
2772 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2773 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2774 IEM_MC_PREPARE_SSE_USAGE();
2775 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2776 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2777 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2778 IEM_MC_ADVANCE_RIP();
2779 IEM_MC_END();
2780 }
2781 else
2782 {
2783 /*
2784 * Register, memory.
2785 */
2786 IEM_MC_BEGIN(2, 2);
2787 IEM_MC_ARG(uint128_t *, pDst, 0);
2788 IEM_MC_LOCAL(uint128_t, uSrc);
2789 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2790 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2791
2792 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2794 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2795 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2796
2797 IEM_MC_PREPARE_SSE_USAGE();
2798 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2799 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2800
2801 IEM_MC_ADVANCE_RIP();
2802 IEM_MC_END();
2803 }
2804 return VINF_SUCCESS;
2805
2806 case 0: /* MMX */
2807 if (!pImpl->pfnU64)
2808 return IEMOP_RAISE_INVALID_OPCODE();
2809 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2810 {
2811 /*
2812 * Register, register.
2813 */
2814 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2815 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2817 IEM_MC_BEGIN(2, 0);
2818 IEM_MC_ARG(uint64_t *, pDst, 0);
2819 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2820 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2821 IEM_MC_PREPARE_FPU_USAGE();
2822 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2823 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2824 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2825 IEM_MC_ADVANCE_RIP();
2826 IEM_MC_END();
2827 }
2828 else
2829 {
2830 /*
2831 * Register, memory.
2832 */
2833 IEM_MC_BEGIN(2, 2);
2834 IEM_MC_ARG(uint64_t *, pDst, 0);
2835 IEM_MC_LOCAL(uint64_t, uSrc);
2836 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2837 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2838
2839 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2841 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2842 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2843
2844 IEM_MC_PREPARE_FPU_USAGE();
2845 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2846 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2847
2848 IEM_MC_ADVANCE_RIP();
2849 IEM_MC_END();
2850 }
2851 return VINF_SUCCESS;
2852
2853 default:
2854 return IEMOP_RAISE_INVALID_OPCODE();
2855 }
2856}
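
/*
 * Mirror-image example for the high-to-full forms (illustrative values),
 * using MMX punpckhbw:
 *      dst = 0x0706050403020100, src = 0x0F0E0D0C0B0A0908
 * interleaving the high dwords gives dst = 0x0F070E060D050C04.
 */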
2857
2858
2859/** Opcode 0x0f 0x68. */
2860FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
2861{
2862 IEMOP_MNEMONIC(punpckhbw, "punpckhbw");
2863 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2864}
2865
2866
2867/** Opcode 0x0f 0x69. */
2868FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
2869{
2870 IEMOP_MNEMONIC(punpckhwd, "punpckhwd");
2871 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2872}
2873
2874
2875/** Opcode 0x0f 0x6a. */
2876FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
2877{
2878 IEMOP_MNEMONIC(punpckhdq, "punpckhdq");
2879 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2880}
2881
2882/** Opcode 0x0f 0x6b. */
2883FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);
2884
2885
2886/** Opcode 0x0f 0x6c. */
2887FNIEMOP_DEF(iemOp_punpcklqdq_Vdq_Wdq)
2888{
2889 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq");
2890 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2891}
2892
2893
2894/** Opcode 0x0f 0x6d. */
2895FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
2896{
2897 IEMOP_MNEMONIC(punpckhqdq, "punpckhqdq");
2898 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
2899}
2900
2901
2902/** Opcode 0x0f 0x6e. */
2903FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
2904{
2905 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2906 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2907 {
2908 case IEM_OP_PRF_SIZE_OP: /* SSE */
2909 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2910 IEMOP_MNEMONIC(movdq_Vq_Eq, "movq Vq,Eq");
2911 else
2912 IEMOP_MNEMONIC(movdq_Vd_Ed, "movd Vd,Ed");
2913 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2914 {
2915 /* XMM, greg*/
2916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2917 IEM_MC_BEGIN(0, 1);
2918 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2919 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2920 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2921 {
2922 IEM_MC_LOCAL(uint64_t, u64Tmp);
2923 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2924 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2925 }
2926 else
2927 {
2928 IEM_MC_LOCAL(uint32_t, u32Tmp);
2929 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2930 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2931 }
2932 IEM_MC_ADVANCE_RIP();
2933 IEM_MC_END();
2934 }
2935 else
2936 {
2937 /* XMM, [mem] */
2938 IEM_MC_BEGIN(0, 2);
2939 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2940 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
2941 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2943 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2944 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2945 {
2946 IEM_MC_LOCAL(uint64_t, u64Tmp);
2947 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2948 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2949 }
2950 else
2951 {
2952 IEM_MC_LOCAL(uint32_t, u32Tmp);
2953 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2954 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2955 }
2956 IEM_MC_ADVANCE_RIP();
2957 IEM_MC_END();
2958 }
2959 return VINF_SUCCESS;
2960
2961 case 0: /* MMX */
2962 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2963 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
2964 else
2965 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
2966 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2967 {
2968 /* MMX, greg */
2969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2970 IEM_MC_BEGIN(0, 1);
2971 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2972 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2973 IEM_MC_LOCAL(uint64_t, u64Tmp);
2974 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2975 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2976 else
2977 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2978 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2979 IEM_MC_ADVANCE_RIP();
2980 IEM_MC_END();
2981 }
2982 else
2983 {
2984 /* MMX, [mem] */
2985 IEM_MC_BEGIN(0, 2);
2986 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2987 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2988 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2990 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2991 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2992 {
2993 IEM_MC_LOCAL(uint64_t, u64Tmp);
2994 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2995 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2996 }
2997 else
2998 {
2999 IEM_MC_LOCAL(uint32_t, u32Tmp);
3000 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3001 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3002 }
3003 IEM_MC_ADVANCE_RIP();
3004 IEM_MC_END();
3005 }
3006 return VINF_SUCCESS;
3007
3008 default:
3009 return IEMOP_RAISE_INVALID_OPCODE();
3010 }
3011}
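
/*
 * REX.W is what separates the two mnemonics here; assuming standard
 * encodings:
 *      66 0F 6E C0      movd xmm0, eax  (zero-extended to 128 bits)
 *      66 48 0F 6E C0   movq xmm0, rax
 *      0F 6E C0         movd mm0, eax   (zero-extended to 64 bits)
 */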
3012
3013
3014/** Opcode 0x0f 0x6f. */
3015FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
3016{
3017 bool fAligned = false;
3018 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3019 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3020 {
3021 case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
3022 fAligned = true;
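/* fall thru */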
3023 case IEM_OP_PRF_REPZ: /* SSE unaligned */
3024 if (fAligned)
3025 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
3026 else
3027 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3028 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3029 {
3030 /*
3031 * Register, register.
3032 */
3033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3034 IEM_MC_BEGIN(0, 0);
3035 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3036 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3037 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3038 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3039 IEM_MC_ADVANCE_RIP();
3040 IEM_MC_END();
3041 }
3042 else
3043 {
3044 /*
3045 * Register, memory.
3046 */
3047 IEM_MC_BEGIN(0, 2);
3048 IEM_MC_LOCAL(uint128_t, u128Tmp);
3049 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3050
3051 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3053 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3054 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3055 if (fAligned)
3056 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3057 else
3058 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3059 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3060
3061 IEM_MC_ADVANCE_RIP();
3062 IEM_MC_END();
3063 }
3064 return VINF_SUCCESS;
3065
3066 case 0: /* MMX */
3067 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
3068 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3069 {
3070 /*
3071 * Register, register.
3072 */
3073 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3074 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3076 IEM_MC_BEGIN(0, 1);
3077 IEM_MC_LOCAL(uint64_t, u64Tmp);
3078 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3079 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3080 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3081 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3082 IEM_MC_ADVANCE_RIP();
3083 IEM_MC_END();
3084 }
3085 else
3086 {
3087 /*
3088 * Register, memory.
3089 */
3090 IEM_MC_BEGIN(0, 2);
3091 IEM_MC_LOCAL(uint64_t, u64Tmp);
3092 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3093
3094 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3096 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3097 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3098 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3099 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3100
3101 IEM_MC_ADVANCE_RIP();
3102 IEM_MC_END();
3103 }
3104 return VINF_SUCCESS;
3105
3106 default:
3107 return IEMOP_RAISE_INVALID_OPCODE();
3108 }
3109}
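
/*
 * The prefix selects the whole personality of 0x6f: no prefix is the MMX
 * movq, 66 is movdqa (aligned fetch, so a misaligned Wdq should #GP), and
 * F3 is movdqu with a plain unaligned fetch - exactly the three paths
 * above.
 */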
3110
3111
3112/** Opcode 0x0f 0x70. The immediate here is evil! */
3113FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
3114{
3115 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3116 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3117 {
3118 case IEM_OP_PRF_SIZE_OP: /* SSE */
3119 case IEM_OP_PRF_REPNZ: /* SSE */
3120 case IEM_OP_PRF_REPZ: /* SSE */
3121 {
3122 PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
3123 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3124 {
3125 case IEM_OP_PRF_SIZE_OP:
3126 IEMOP_MNEMONIC(pshufd_Vdq_Wdq, "pshufd Vdq,Wdq,Ib");
3127 pfnAImpl = iemAImpl_pshufd;
3128 break;
3129 case IEM_OP_PRF_REPNZ:
3130 IEMOP_MNEMONIC(pshuflw_Vdq_Wdq, "pshuflw Vdq,Wdq,Ib");
3131 pfnAImpl = iemAImpl_pshuflw;
3132 break;
3133 case IEM_OP_PRF_REPZ:
3134 IEMOP_MNEMONIC(pshufhw_Vdq_Wdq, "pshufhw Vdq,Wdq,Ib");
3135 pfnAImpl = iemAImpl_pshufhw;
3136 break;
3137 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3138 }
3139 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3140 {
3141 /*
3142 * Register, register.
3143 */
3144 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3146
3147 IEM_MC_BEGIN(3, 0);
3148 IEM_MC_ARG(uint128_t *, pDst, 0);
3149 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3150 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3151 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3152 IEM_MC_PREPARE_SSE_USAGE();
3153 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3154 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3155 IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
3156 IEM_MC_ADVANCE_RIP();
3157 IEM_MC_END();
3158 }
3159 else
3160 {
3161 /*
3162 * Register, memory.
3163 */
3164 IEM_MC_BEGIN(3, 2);
3165 IEM_MC_ARG(uint128_t *, pDst, 0);
3166 IEM_MC_LOCAL(uint128_t, uSrc);
3167 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3168 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3169
3170 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3171 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3172 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3174 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3175
3176 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3177 IEM_MC_PREPARE_SSE_USAGE();
3178 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3179 IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
3180
3181 IEM_MC_ADVANCE_RIP();
3182 IEM_MC_END();
3183 }
3184 return VINF_SUCCESS;
3185 }
3186
3187 case 0: /* MMX Extension */
3188 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3189 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3190 {
3191 /*
3192 * Register, register.
3193 */
3194 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3196
3197 IEM_MC_BEGIN(3, 0);
3198 IEM_MC_ARG(uint64_t *, pDst, 0);
3199 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3200 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3201 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3202 IEM_MC_PREPARE_FPU_USAGE();
3203 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3204 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3205 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3206 IEM_MC_ADVANCE_RIP();
3207 IEM_MC_END();
3208 }
3209 else
3210 {
3211 /*
3212 * Register, memory.
3213 */
3214 IEM_MC_BEGIN(3, 2);
3215 IEM_MC_ARG(uint64_t *, pDst, 0);
3216 IEM_MC_LOCAL(uint64_t, uSrc);
3217 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3218 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3219
3220 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3221 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3222 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3224 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3225
3226 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3227 IEM_MC_PREPARE_FPU_USAGE();
3228 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3229 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3230
3231 IEM_MC_ADVANCE_RIP();
3232 IEM_MC_END();
3233 }
3234 return VINF_SUCCESS;
3235
3236 default:
3237 return IEMOP_RAISE_INVALID_OPCODE();
3238 }
3239}
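
/*
 * The "evil" immediate is four 2-bit source-element selectors.  E.g. for
 * pshufw, imm8 = 0x1B = 00 01 10 11b picks source words 3,2,1,0 for
 * destination words 0,1,2,3, i.e. it reverses the four words; pshufd does
 * the same with dwords, pshuflw/pshufhw with the low/high word group only.
 */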
3240
3241
3242/** Opcode 0x0f 0x71 11/2. */
3243FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3244
3245/** Opcode 0x66 0x0f 0x71 11/2. */
3246FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);
3247
3248/** Opcode 0x0f 0x71 11/4. */
3249FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3250
3251/** Opcode 0x66 0x0f 0x71 11/4. */
3252FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);
3253
3254/** Opcode 0x0f 0x71 11/6. */
3255FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3256
3257/** Opcode 0x66 0x0f 0x71 11/6. */
3258FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
3259
3260
3261/** Opcode 0x0f 0x71. */
3262FNIEMOP_DEF(iemOp_Grp12)
3263{
3264 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3265 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3266 return IEMOP_RAISE_INVALID_OPCODE();
3267 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3268 {
3269 case 0: case 1: case 3: case 5: case 7:
3270 return IEMOP_RAISE_INVALID_OPCODE();
3271 case 2:
3272 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3273 {
3274 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
3275 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
3276 default: return IEMOP_RAISE_INVALID_OPCODE();
3277 }
3278 case 4:
3279 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3280 {
3281 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
3282 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
3283 default: return IEMOP_RAISE_INVALID_OPCODE();
3284 }
3285 case 6:
3286 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3287 {
3288 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
3289 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
3290 default: return IEMOP_RAISE_INVALID_OPCODE();
3291 }
3292 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3293 }
3294}
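
/*
 * Groups 12, 13 and 14 all share this shape: mod must be 3 (only register
 * operands are defined), the reg field selects the shift operation and an
 * Ib byte supplies the count.  E.g. 0F 71 /6 ib is psllw Nq,Ib and
 * 66 0F 71 /6 ib is psllw Udq,Ib.
 */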
3295
3296
3297/** Opcode 0x0f 0x72 11/2. */
3298FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3299
3300/** Opcode 0x66 0x0f 0x72 11/2. */
3301FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);
3302
3303/** Opcode 0x0f 0x72 11/4. */
3304FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3305
3306/** Opcode 0x66 0x0f 0x72 11/4. */
3307FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);
3308
3309/** Opcode 0x0f 0x72 11/6. */
3310FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3311
3312/** Opcode 0x66 0x0f 0x72 11/6. */
3313FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
3314
3315
3316/** Opcode 0x0f 0x72. */
3317FNIEMOP_DEF(iemOp_Grp13)
3318{
3319 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3320 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3321 return IEMOP_RAISE_INVALID_OPCODE();
3322 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3323 {
3324 case 0: case 1: case 3: case 5: case 7:
3325 return IEMOP_RAISE_INVALID_OPCODE();
3326 case 2:
3327 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3328 {
3329 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
3330 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
3331 default: return IEMOP_RAISE_INVALID_OPCODE();
3332 }
3333 case 4:
3334 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3335 {
3336 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
3337 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
3338 default: return IEMOP_RAISE_INVALID_OPCODE();
3339 }
3340 case 6:
3341 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3342 {
3343 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
3344 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
3345 default: return IEMOP_RAISE_INVALID_OPCODE();
3346 }
3347 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3348 }
3349}
3350
3351
3352/** Opcode 0x0f 0x73 11/2. */
3353FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3354
3355/** Opcode 0x66 0x0f 0x73 11/2. */
3356FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);
3357
3358/** Opcode 0x66 0x0f 0x73 11/3. */
3359FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT
3360
3361/** Opcode 0x0f 0x73 11/6. */
3362FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3363
3364/** Opcode 0x66 0x0f 0x73 11/6. */
3365FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);
3366
3367/** Opcode 0x66 0x0f 0x73 11/7. */
3368FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
3369
3370
3371/** Opcode 0x0f 0x73. */
3372FNIEMOP_DEF(iemOp_Grp14)
3373{
3374 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3375 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3376 return IEMOP_RAISE_INVALID_OPCODE();
3377 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3378 {
3379 case 0: case 1: case 4: case 5:
3380 return IEMOP_RAISE_INVALID_OPCODE();
3381 case 2:
3382 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3383 {
3384 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
3385 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
3386 default: return IEMOP_RAISE_INVALID_OPCODE();
3387 }
3388 case 3:
3389 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3390 {
3391 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
3392 default: return IEMOP_RAISE_INVALID_OPCODE();
3393 }
3394 case 6:
3395 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3396 {
3397 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
3398 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
3399 default: return IEMOP_RAISE_INVALID_OPCODE();
3400 }
3401 case 7:
3402 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3403 {
3404 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
3405 default: return IEMOP_RAISE_INVALID_OPCODE();
3406 }
3407 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3408 }
3409}
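
/*
 * Worth noting for group 14: unlike the word/dword/qword shifts, the /3
 * (psrldq) and /7 (pslldq) encodings shift the whole 128-bit register by
 * bytes, which is why they exist only with the 66 prefix (SSE2, Udq).
 */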
3410
3411
3412/**
3413 * Common worker for SSE2 and MMX instructions on the forms:
3414 * pxxx mm1, mm2/mem64
3415 * pxxx xmm1, xmm2/mem128
3416 *
3417 * Proper alignment of the 128-bit operand is enforced.
3418 * Exceptions type 4. SSE2 and MMX cpuid checks.
3419 */
3420FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3421{
3422 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3423 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3424 {
3425 case IEM_OP_PRF_SIZE_OP: /* SSE */
3426 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3427 {
3428 /*
3429 * Register, register.
3430 */
3431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3432 IEM_MC_BEGIN(2, 0);
3433 IEM_MC_ARG(uint128_t *, pDst, 0);
3434 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3435 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3436 IEM_MC_PREPARE_SSE_USAGE();
3437 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3438 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3439 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3440 IEM_MC_ADVANCE_RIP();
3441 IEM_MC_END();
3442 }
3443 else
3444 {
3445 /*
3446 * Register, memory.
3447 */
3448 IEM_MC_BEGIN(2, 2);
3449 IEM_MC_ARG(uint128_t *, pDst, 0);
3450 IEM_MC_LOCAL(uint128_t, uSrc);
3451 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3452 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3453
3454 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3456 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3457 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3458
3459 IEM_MC_PREPARE_SSE_USAGE();
3460 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3461 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3462
3463 IEM_MC_ADVANCE_RIP();
3464 IEM_MC_END();
3465 }
3466 return VINF_SUCCESS;
3467
3468 case 0: /* MMX */
3469 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3470 {
3471 /*
3472 * Register, register.
3473 */
3474 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3475 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3477 IEM_MC_BEGIN(2, 0);
3478 IEM_MC_ARG(uint64_t *, pDst, 0);
3479 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3480 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3481 IEM_MC_PREPARE_FPU_USAGE();
3482 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3483 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3484 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3485 IEM_MC_ADVANCE_RIP();
3486 IEM_MC_END();
3487 }
3488 else
3489 {
3490 /*
3491 * Register, memory.
3492 */
3493 IEM_MC_BEGIN(2, 2);
3494 IEM_MC_ARG(uint64_t *, pDst, 0);
3495 IEM_MC_LOCAL(uint64_t, uSrc);
3496 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3498
3499 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3501 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3502 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3503
3504 IEM_MC_PREPARE_FPU_USAGE();
3505 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3506 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3507
3508 IEM_MC_ADVANCE_RIP();
3509 IEM_MC_END();
3510 }
3511 return VINF_SUCCESS;
3512
3513 default:
3514 return IEMOP_RAISE_INVALID_OPCODE();
3515 }
3516}
3517
3518
3519/** Opcode 0x0f 0x74. */
3520FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
3521{
3522 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3523 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3524}
3525
3526
3527/** Opcode 0x0f 0x75. */
3528FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
3529{
3530 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3531 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3532}
3533
3534
3535/** Opcode 0x0f 0x76. */
3536 FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq__pcmpeqd_Vdq_Wdq)
3537{
3538 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3539 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3540}
3541
3542
3543/** Opcode 0x0f 0x77 - emms vzeroupperv vzeroallv */
3544FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
3545/* Opcode 0x66 0x0f 0x77 - invalid */
3546/* Opcode 0xf3 0x0f 0x77 - invalid */
3547/* Opcode 0xf2 0x0f 0x77 - invalid */
3548
3549/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3550FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3551/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3552FNIEMOP_STUB(iemOp_AmdGrp17);
3553/* Opcode 0xf3 0x0f 0x78 - invalid */
3554/* Opcode 0xf2 0x0f 0x78 - invalid */
3555
3556/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3557FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3558/* Opcode 0x66 0x0f 0x79 - invalid */
3559/* Opcode 0xf3 0x0f 0x79 - invalid */
3560/* Opcode 0xf2 0x0f 0x79 - invalid */
3561
3562/* Opcode 0x0f 0x7a - invalid */
3563/* Opcode 0x66 0x0f 0x7a - invalid */
3564/* Opcode 0xf3 0x0f 0x7a - invalid */
3565/* Opcode 0xf2 0x0f 0x7a - invalid */
3566
3567/* Opcode 0x0f 0x7b - invalid */
3568/* Opcode 0x66 0x0f 0x7b - invalid */
3569/* Opcode 0xf3 0x0f 0x7b - invalid */
3570/* Opcode 0xf2 0x0f 0x7b - invalid */
3571
3572/* Opcode 0x0f 0x7c - invalid */
3573/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
3574FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3575/* Opcode 0xf3 0x0f 0x7c - invalid */
3576/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
3577FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3578
3579/* Opcode 0x0f 0x7d - invalid */
3580/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
3581FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3582/* Opcode 0xf3 0x0f 0x7d - invalid */
3583/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
3584FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3585
3586
3587/** Opcode 0x0f 0x7e. */
3588FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
3589{
3590 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
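     /* 0x66 selects the SSE form and no prefix the MMX form; in both cases
        REX.W picks the 64-bit movq variant over the 32-bit movd one. */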
3591 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3592 {
3593 case IEM_OP_PRF_SIZE_OP: /* SSE */
3594 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3595 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
3596 else
3597 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
3598 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3599 {
3600 /* greg, XMM */
3601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3602 IEM_MC_BEGIN(0, 1);
3603 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3604 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3605 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3606 {
3607 IEM_MC_LOCAL(uint64_t, u64Tmp);
3608 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3609 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3610 }
3611 else
3612 {
3613 IEM_MC_LOCAL(uint32_t, u32Tmp);
3614 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3615 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3616 }
3617 IEM_MC_ADVANCE_RIP();
3618 IEM_MC_END();
3619 }
3620 else
3621 {
3622 /* [mem], XMM */
3623 IEM_MC_BEGIN(0, 2);
3624 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3625 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3627 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3628 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3629 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3630 {
3631 IEM_MC_LOCAL(uint64_t, u64Tmp);
3632 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3633 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3634 }
3635 else
3636 {
3637 IEM_MC_LOCAL(uint32_t, u32Tmp);
3638 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3639 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3640 }
3641 IEM_MC_ADVANCE_RIP();
3642 IEM_MC_END();
3643 }
3644 return VINF_SUCCESS;
3645
3646 case 0: /* MMX */
3647 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3648 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3649 else
3650 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3651 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3652 {
3653 /* greg, MMX */
3654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3655 IEM_MC_BEGIN(0, 1);
3656 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3657 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3658 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3659 {
3660 IEM_MC_LOCAL(uint64_t, u64Tmp);
3661 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3662 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3663 }
3664 else
3665 {
3666 IEM_MC_LOCAL(uint32_t, u32Tmp);
3667 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3668 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3669 }
3670 IEM_MC_ADVANCE_RIP();
3671 IEM_MC_END();
3672 }
3673 else
3674 {
3675 /* [mem], MMX */
3676 IEM_MC_BEGIN(0, 2);
3677 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3678 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3680 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3681 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3682 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3683 {
3684 IEM_MC_LOCAL(uint64_t, u64Tmp);
3685 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3686 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3687 }
3688 else
3689 {
3690 IEM_MC_LOCAL(uint32_t, u32Tmp);
3691 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3692 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3693 }
3694 IEM_MC_ADVANCE_RIP();
3695 IEM_MC_END();
3696 }
3697 return VINF_SUCCESS;
3698
3699 default:
3700 return IEMOP_RAISE_INVALID_OPCODE();
3701 }
3702}
3703
3704
3705/** Opcode 0x0f 0x7f. */
3706FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
3707{
3708 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3709 bool fAligned = false;
3710 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3711 {
3712 case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
3713 fAligned = true; /* fall thru */
3714 case IEM_OP_PRF_REPZ: /* SSE unaligned */
3715 if (fAligned)
3716 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wdq,Vdq");
3717 else
3718 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wdq,Vdq");
3719 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3720 {
3721 /*
3722 * Register, register.
3723 */
3724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3725 IEM_MC_BEGIN(0, 0);
3726 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3727 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3728 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3729 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3730 IEM_MC_ADVANCE_RIP();
3731 IEM_MC_END();
3732 }
3733 else
3734 {
3735 /*
3736 * Register, memory.
3737 */
3738 IEM_MC_BEGIN(0, 2);
3739 IEM_MC_LOCAL(uint128_t, u128Tmp);
3740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3741
3742 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3744 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3745 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3746
3747 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3748 if (fAligned)
3749 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3750 else
3751 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3752
3753 IEM_MC_ADVANCE_RIP();
3754 IEM_MC_END();
3755 }
3756 return VINF_SUCCESS;
3757
3758 case 0: /* MMX */
3759 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3760
3761 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3762 {
3763 /*
3764 * Register, register.
3765 */
3766 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3767 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3769 IEM_MC_BEGIN(0, 1);
3770 IEM_MC_LOCAL(uint64_t, u64Tmp);
3771 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3772 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3773 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3774 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3775 IEM_MC_ADVANCE_RIP();
3776 IEM_MC_END();
3777 }
3778 else
3779 {
3780 /*
3781 * Register, memory.
3782 */
3783 IEM_MC_BEGIN(0, 2);
3784 IEM_MC_LOCAL(uint64_t, u64Tmp);
3785 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3786
3787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3789 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3790 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3791
3792 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3793 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3794
3795 IEM_MC_ADVANCE_RIP();
3796 IEM_MC_END();
3797 }
3798 return VINF_SUCCESS;
3799
3800 default:
3801 return IEMOP_RAISE_INVALID_OPCODE();
3802 }
3803}
3804
3805
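/*
 * The Jcc Jv handlers below (0x0f 0x80 thru 0x8f) all follow the same
 * pattern: fetch a 16-bit or 32-bit displacement according to the effective
 * operand size (64-bit mode defaults to 64-bit operand size but still uses
 * a 32-bit displacement), then either IEM_MC_REL_JMP_S16/_S32 when the
 * condition holds or IEM_MC_ADVANCE_RIP when it does not.
 */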
3806
3807/** Opcode 0x0f 0x80. */
3808FNIEMOP_DEF(iemOp_jo_Jv)
3809{
3810 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
3811 IEMOP_HLP_MIN_386();
3812 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3813 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3814 {
3815 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3817
3818 IEM_MC_BEGIN(0, 0);
3819 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3820 IEM_MC_REL_JMP_S16(i16Imm);
3821 } IEM_MC_ELSE() {
3822 IEM_MC_ADVANCE_RIP();
3823 } IEM_MC_ENDIF();
3824 IEM_MC_END();
3825 }
3826 else
3827 {
3828 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3829 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3830
3831 IEM_MC_BEGIN(0, 0);
3832 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3833 IEM_MC_REL_JMP_S32(i32Imm);
3834 } IEM_MC_ELSE() {
3835 IEM_MC_ADVANCE_RIP();
3836 } IEM_MC_ENDIF();
3837 IEM_MC_END();
3838 }
3839 return VINF_SUCCESS;
3840}
3841
3842
3843/** Opcode 0x0f 0x81. */
3844FNIEMOP_DEF(iemOp_jno_Jv)
3845{
3846 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
3847 IEMOP_HLP_MIN_386();
3848 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3849 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3850 {
3851 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3853
3854 IEM_MC_BEGIN(0, 0);
3855 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3856 IEM_MC_ADVANCE_RIP();
3857 } IEM_MC_ELSE() {
3858 IEM_MC_REL_JMP_S16(i16Imm);
3859 } IEM_MC_ENDIF();
3860 IEM_MC_END();
3861 }
3862 else
3863 {
3864 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3866
3867 IEM_MC_BEGIN(0, 0);
3868 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3869 IEM_MC_ADVANCE_RIP();
3870 } IEM_MC_ELSE() {
3871 IEM_MC_REL_JMP_S32(i32Imm);
3872 } IEM_MC_ENDIF();
3873 IEM_MC_END();
3874 }
3875 return VINF_SUCCESS;
3876}
3877
3878
3879/** Opcode 0x0f 0x82. */
3880FNIEMOP_DEF(iemOp_jc_Jv)
3881{
3882 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
3883 IEMOP_HLP_MIN_386();
3884 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3885 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3886 {
3887 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3889
3890 IEM_MC_BEGIN(0, 0);
3891 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3892 IEM_MC_REL_JMP_S16(i16Imm);
3893 } IEM_MC_ELSE() {
3894 IEM_MC_ADVANCE_RIP();
3895 } IEM_MC_ENDIF();
3896 IEM_MC_END();
3897 }
3898 else
3899 {
3900 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3902
3903 IEM_MC_BEGIN(0, 0);
3904 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3905 IEM_MC_REL_JMP_S32(i32Imm);
3906 } IEM_MC_ELSE() {
3907 IEM_MC_ADVANCE_RIP();
3908 } IEM_MC_ENDIF();
3909 IEM_MC_END();
3910 }
3911 return VINF_SUCCESS;
3912}
3913
3914
3915/** Opcode 0x0f 0x83. */
3916FNIEMOP_DEF(iemOp_jnc_Jv)
3917{
3918 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
3919 IEMOP_HLP_MIN_386();
3920 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3921 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3922 {
3923 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3925
3926 IEM_MC_BEGIN(0, 0);
3927 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3928 IEM_MC_ADVANCE_RIP();
3929 } IEM_MC_ELSE() {
3930 IEM_MC_REL_JMP_S16(i16Imm);
3931 } IEM_MC_ENDIF();
3932 IEM_MC_END();
3933 }
3934 else
3935 {
3936 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3938
3939 IEM_MC_BEGIN(0, 0);
3940 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3941 IEM_MC_ADVANCE_RIP();
3942 } IEM_MC_ELSE() {
3943 IEM_MC_REL_JMP_S32(i32Imm);
3944 } IEM_MC_ENDIF();
3945 IEM_MC_END();
3946 }
3947 return VINF_SUCCESS;
3948}
3949
3950
3951/** Opcode 0x0f 0x84. */
3952FNIEMOP_DEF(iemOp_je_Jv)
3953{
3954 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
3955 IEMOP_HLP_MIN_386();
3956 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3957 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3958 {
3959 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3961
3962 IEM_MC_BEGIN(0, 0);
3963 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3964 IEM_MC_REL_JMP_S16(i16Imm);
3965 } IEM_MC_ELSE() {
3966 IEM_MC_ADVANCE_RIP();
3967 } IEM_MC_ENDIF();
3968 IEM_MC_END();
3969 }
3970 else
3971 {
3972 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3974
3975 IEM_MC_BEGIN(0, 0);
3976 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3977 IEM_MC_REL_JMP_S32(i32Imm);
3978 } IEM_MC_ELSE() {
3979 IEM_MC_ADVANCE_RIP();
3980 } IEM_MC_ENDIF();
3981 IEM_MC_END();
3982 }
3983 return VINF_SUCCESS;
3984}
3985
3986
3987/** Opcode 0x0f 0x85. */
3988FNIEMOP_DEF(iemOp_jne_Jv)
3989{
3990 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
3991 IEMOP_HLP_MIN_386();
3992 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3993 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3994 {
3995 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3997
3998 IEM_MC_BEGIN(0, 0);
3999 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4000 IEM_MC_ADVANCE_RIP();
4001 } IEM_MC_ELSE() {
4002 IEM_MC_REL_JMP_S16(i16Imm);
4003 } IEM_MC_ENDIF();
4004 IEM_MC_END();
4005 }
4006 else
4007 {
4008 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4010
4011 IEM_MC_BEGIN(0, 0);
4012 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4013 IEM_MC_ADVANCE_RIP();
4014 } IEM_MC_ELSE() {
4015 IEM_MC_REL_JMP_S32(i32Imm);
4016 } IEM_MC_ENDIF();
4017 IEM_MC_END();
4018 }
4019 return VINF_SUCCESS;
4020}
4021
4022
4023/** Opcode 0x0f 0x86. */
4024FNIEMOP_DEF(iemOp_jbe_Jv)
4025{
4026 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4027 IEMOP_HLP_MIN_386();
4028 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4029 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4030 {
4031 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4033
4034 IEM_MC_BEGIN(0, 0);
4035 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4036 IEM_MC_REL_JMP_S16(i16Imm);
4037 } IEM_MC_ELSE() {
4038 IEM_MC_ADVANCE_RIP();
4039 } IEM_MC_ENDIF();
4040 IEM_MC_END();
4041 }
4042 else
4043 {
4044 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4046
4047 IEM_MC_BEGIN(0, 0);
4048 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4049 IEM_MC_REL_JMP_S32(i32Imm);
4050 } IEM_MC_ELSE() {
4051 IEM_MC_ADVANCE_RIP();
4052 } IEM_MC_ENDIF();
4053 IEM_MC_END();
4054 }
4055 return VINF_SUCCESS;
4056}
4057
4058
4059/** Opcode 0x0f 0x87. */
4060FNIEMOP_DEF(iemOp_jnbe_Jv)
4061{
4062 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4063 IEMOP_HLP_MIN_386();
4064 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4065 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4066 {
4067 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4069
4070 IEM_MC_BEGIN(0, 0);
4071 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4072 IEM_MC_ADVANCE_RIP();
4073 } IEM_MC_ELSE() {
4074 IEM_MC_REL_JMP_S16(i16Imm);
4075 } IEM_MC_ENDIF();
4076 IEM_MC_END();
4077 }
4078 else
4079 {
4080 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4082
4083 IEM_MC_BEGIN(0, 0);
4084 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4085 IEM_MC_ADVANCE_RIP();
4086 } IEM_MC_ELSE() {
4087 IEM_MC_REL_JMP_S32(i32Imm);
4088 } IEM_MC_ENDIF();
4089 IEM_MC_END();
4090 }
4091 return VINF_SUCCESS;
4092}
4093
4094
4095/** Opcode 0x0f 0x88. */
4096FNIEMOP_DEF(iemOp_js_Jv)
4097{
4098 IEMOP_MNEMONIC(js_Jv, "js Jv");
4099 IEMOP_HLP_MIN_386();
4100 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4101 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4102 {
4103 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4105
4106 IEM_MC_BEGIN(0, 0);
4107 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4108 IEM_MC_REL_JMP_S16(i16Imm);
4109 } IEM_MC_ELSE() {
4110 IEM_MC_ADVANCE_RIP();
4111 } IEM_MC_ENDIF();
4112 IEM_MC_END();
4113 }
4114 else
4115 {
4116 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4118
4119 IEM_MC_BEGIN(0, 0);
4120 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4121 IEM_MC_REL_JMP_S32(i32Imm);
4122 } IEM_MC_ELSE() {
4123 IEM_MC_ADVANCE_RIP();
4124 } IEM_MC_ENDIF();
4125 IEM_MC_END();
4126 }
4127 return VINF_SUCCESS;
4128}
4129
4130
4131/** Opcode 0x0f 0x89. */
4132FNIEMOP_DEF(iemOp_jns_Jv)
4133{
4134 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4135 IEMOP_HLP_MIN_386();
4136 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4137 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4138 {
4139 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4141
4142 IEM_MC_BEGIN(0, 0);
4143 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4144 IEM_MC_ADVANCE_RIP();
4145 } IEM_MC_ELSE() {
4146 IEM_MC_REL_JMP_S16(i16Imm);
4147 } IEM_MC_ENDIF();
4148 IEM_MC_END();
4149 }
4150 else
4151 {
4152 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4154
4155 IEM_MC_BEGIN(0, 0);
4156 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4157 IEM_MC_ADVANCE_RIP();
4158 } IEM_MC_ELSE() {
4159 IEM_MC_REL_JMP_S32(i32Imm);
4160 } IEM_MC_ENDIF();
4161 IEM_MC_END();
4162 }
4163 return VINF_SUCCESS;
4164}
4165
4166
4167/** Opcode 0x0f 0x8a. */
4168FNIEMOP_DEF(iemOp_jp_Jv)
4169{
4170 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
4171 IEMOP_HLP_MIN_386();
4172 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4173 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4174 {
4175 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4177
4178 IEM_MC_BEGIN(0, 0);
4179 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4180 IEM_MC_REL_JMP_S16(i16Imm);
4181 } IEM_MC_ELSE() {
4182 IEM_MC_ADVANCE_RIP();
4183 } IEM_MC_ENDIF();
4184 IEM_MC_END();
4185 }
4186 else
4187 {
4188 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4190
4191 IEM_MC_BEGIN(0, 0);
4192 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4193 IEM_MC_REL_JMP_S32(i32Imm);
4194 } IEM_MC_ELSE() {
4195 IEM_MC_ADVANCE_RIP();
4196 } IEM_MC_ENDIF();
4197 IEM_MC_END();
4198 }
4199 return VINF_SUCCESS;
4200}
4201
4202
4203/** Opcode 0x0f 0x8b. */
4204FNIEMOP_DEF(iemOp_jnp_Jv)
4205{
4206 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
4207 IEMOP_HLP_MIN_386();
4208 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4209 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4210 {
4211 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4213
4214 IEM_MC_BEGIN(0, 0);
4215 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4216 IEM_MC_ADVANCE_RIP();
4217 } IEM_MC_ELSE() {
4218 IEM_MC_REL_JMP_S16(i16Imm);
4219 } IEM_MC_ENDIF();
4220 IEM_MC_END();
4221 }
4222 else
4223 {
4224 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4226
4227 IEM_MC_BEGIN(0, 0);
4228 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4229 IEM_MC_ADVANCE_RIP();
4230 } IEM_MC_ELSE() {
4231 IEM_MC_REL_JMP_S32(i32Imm);
4232 } IEM_MC_ENDIF();
4233 IEM_MC_END();
4234 }
4235 return VINF_SUCCESS;
4236}
4237
4238
4239/** Opcode 0x0f 0x8c. */
4240FNIEMOP_DEF(iemOp_jl_Jv)
4241{
4242 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
4243 IEMOP_HLP_MIN_386();
4244 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4245 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4246 {
4247 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4248 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4249
4250 IEM_MC_BEGIN(0, 0);
4251 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4252 IEM_MC_REL_JMP_S16(i16Imm);
4253 } IEM_MC_ELSE() {
4254 IEM_MC_ADVANCE_RIP();
4255 } IEM_MC_ENDIF();
4256 IEM_MC_END();
4257 }
4258 else
4259 {
4260 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4262
4263 IEM_MC_BEGIN(0, 0);
4264 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4265 IEM_MC_REL_JMP_S32(i32Imm);
4266 } IEM_MC_ELSE() {
4267 IEM_MC_ADVANCE_RIP();
4268 } IEM_MC_ENDIF();
4269 IEM_MC_END();
4270 }
4271 return VINF_SUCCESS;
4272}
4273
4274
4275/** Opcode 0x0f 0x8d. */
4276FNIEMOP_DEF(iemOp_jnl_Jv)
4277{
4278 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
4279 IEMOP_HLP_MIN_386();
4280 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4281 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4282 {
4283 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4285
4286 IEM_MC_BEGIN(0, 0);
4287 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4288 IEM_MC_ADVANCE_RIP();
4289 } IEM_MC_ELSE() {
4290 IEM_MC_REL_JMP_S16(i16Imm);
4291 } IEM_MC_ENDIF();
4292 IEM_MC_END();
4293 }
4294 else
4295 {
4296 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4297 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4298
4299 IEM_MC_BEGIN(0, 0);
4300 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4301 IEM_MC_ADVANCE_RIP();
4302 } IEM_MC_ELSE() {
4303 IEM_MC_REL_JMP_S32(i32Imm);
4304 } IEM_MC_ENDIF();
4305 IEM_MC_END();
4306 }
4307 return VINF_SUCCESS;
4308}
4309
4310
4311/** Opcode 0x0f 0x8e. */
4312FNIEMOP_DEF(iemOp_jle_Jv)
4313{
4314 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4315 IEMOP_HLP_MIN_386();
4316 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4317 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4318 {
4319 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4321
4322 IEM_MC_BEGIN(0, 0);
4323 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4324 IEM_MC_REL_JMP_S16(i16Imm);
4325 } IEM_MC_ELSE() {
4326 IEM_MC_ADVANCE_RIP();
4327 } IEM_MC_ENDIF();
4328 IEM_MC_END();
4329 }
4330 else
4331 {
4332 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4334
4335 IEM_MC_BEGIN(0, 0);
4336 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4337 IEM_MC_REL_JMP_S32(i32Imm);
4338 } IEM_MC_ELSE() {
4339 IEM_MC_ADVANCE_RIP();
4340 } IEM_MC_ENDIF();
4341 IEM_MC_END();
4342 }
4343 return VINF_SUCCESS;
4344}
4345
4346
4347/** Opcode 0x0f 0x8f. */
4348FNIEMOP_DEF(iemOp_jnle_Jv)
4349{
4350 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4351 IEMOP_HLP_MIN_386();
4352 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4353 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4354 {
4355 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4357
4358 IEM_MC_BEGIN(0, 0);
4359 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4360 IEM_MC_ADVANCE_RIP();
4361 } IEM_MC_ELSE() {
4362 IEM_MC_REL_JMP_S16(i16Imm);
4363 } IEM_MC_ENDIF();
4364 IEM_MC_END();
4365 }
4366 else
4367 {
4368 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4370
4371 IEM_MC_BEGIN(0, 0);
4372 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4373 IEM_MC_ADVANCE_RIP();
4374 } IEM_MC_ELSE() {
4375 IEM_MC_REL_JMP_S32(i32Imm);
4376 } IEM_MC_ENDIF();
4377 IEM_MC_END();
4378 }
4379 return VINF_SUCCESS;
4380}
4381
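/*
 * The SETcc Eb handlers below (0x0f 0x90 thru 0x9f) share one pattern:
 * evaluate the same EFLAGS condition as the corresponding Jcc and store
 * 1 or 0 into the byte register or memory operand.  The ModRM reg field
 * plays no part in operand selection.
 */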
4382
4383/** Opcode 0x0f 0x90. */
4384FNIEMOP_DEF(iemOp_seto_Eb)
4385{
4386 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4387 IEMOP_HLP_MIN_386();
4388 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4389
4390 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4391 * any way. AMD says it's "unused", whatever that means. We're
4392 * ignoring for now. */
4393 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4394 {
4395 /* register target */
4396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4397 IEM_MC_BEGIN(0, 0);
4398 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4399 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4400 } IEM_MC_ELSE() {
4401 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4402 } IEM_MC_ENDIF();
4403 IEM_MC_ADVANCE_RIP();
4404 IEM_MC_END();
4405 }
4406 else
4407 {
4408 /* memory target */
4409 IEM_MC_BEGIN(0, 1);
4410 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4411 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4413 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4414 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4415 } IEM_MC_ELSE() {
4416 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4417 } IEM_MC_ENDIF();
4418 IEM_MC_ADVANCE_RIP();
4419 IEM_MC_END();
4420 }
4421 return VINF_SUCCESS;
4422}
4423
4424
4425/** Opcode 0x0f 0x91. */
4426FNIEMOP_DEF(iemOp_setno_Eb)
4427{
4428 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4429 IEMOP_HLP_MIN_386();
4430 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4431
4432 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4433 * any way. AMD says it's "unused", whatever that means. We're
4434 * ignoring for now. */
4435 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4436 {
4437 /* register target */
4438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4439 IEM_MC_BEGIN(0, 0);
4440 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4441 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4442 } IEM_MC_ELSE() {
4443 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4444 } IEM_MC_ENDIF();
4445 IEM_MC_ADVANCE_RIP();
4446 IEM_MC_END();
4447 }
4448 else
4449 {
4450 /* memory target */
4451 IEM_MC_BEGIN(0, 1);
4452 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4453 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4455 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4456 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4457 } IEM_MC_ELSE() {
4458 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4459 } IEM_MC_ENDIF();
4460 IEM_MC_ADVANCE_RIP();
4461 IEM_MC_END();
4462 }
4463 return VINF_SUCCESS;
4464}
4465
4466
4467/** Opcode 0x0f 0x92. */
4468FNIEMOP_DEF(iemOp_setc_Eb)
4469{
4470 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4471 IEMOP_HLP_MIN_386();
4472 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4473
4474 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4475 * any way. AMD says it's "unused", whatever that means. We're
4476 * ignoring for now. */
4477 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4478 {
4479 /* register target */
4480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4481 IEM_MC_BEGIN(0, 0);
4482 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4483 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4484 } IEM_MC_ELSE() {
4485 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4486 } IEM_MC_ENDIF();
4487 IEM_MC_ADVANCE_RIP();
4488 IEM_MC_END();
4489 }
4490 else
4491 {
4492 /* memory target */
4493 IEM_MC_BEGIN(0, 1);
4494 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4495 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4497 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4498 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4499 } IEM_MC_ELSE() {
4500 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4501 } IEM_MC_ENDIF();
4502 IEM_MC_ADVANCE_RIP();
4503 IEM_MC_END();
4504 }
4505 return VINF_SUCCESS;
4506}
4507
4508
4509/** Opcode 0x0f 0x93. */
4510FNIEMOP_DEF(iemOp_setnc_Eb)
4511{
4512 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4513 IEMOP_HLP_MIN_386();
4514 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4515
4516 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4517 * any way. AMD says it's "unused", whatever that means. We're
4518 * ignoring for now. */
4519 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4520 {
4521 /* register target */
4522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4523 IEM_MC_BEGIN(0, 0);
4524 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4525 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4526 } IEM_MC_ELSE() {
4527 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4528 } IEM_MC_ENDIF();
4529 IEM_MC_ADVANCE_RIP();
4530 IEM_MC_END();
4531 }
4532 else
4533 {
4534 /* memory target */
4535 IEM_MC_BEGIN(0, 1);
4536 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4537 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4539 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4540 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4541 } IEM_MC_ELSE() {
4542 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4543 } IEM_MC_ENDIF();
4544 IEM_MC_ADVANCE_RIP();
4545 IEM_MC_END();
4546 }
4547 return VINF_SUCCESS;
4548}
4549
4550
4551/** Opcode 0x0f 0x94. */
4552FNIEMOP_DEF(iemOp_sete_Eb)
4553{
4554 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4555 IEMOP_HLP_MIN_386();
4556 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4557
4558 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4559 * any way. AMD says it's "unused", whatever that means. We're
4560 * ignoring for now. */
4561 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4562 {
4563 /* register target */
4564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4565 IEM_MC_BEGIN(0, 0);
4566 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4567 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4568 } IEM_MC_ELSE() {
4569 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4570 } IEM_MC_ENDIF();
4571 IEM_MC_ADVANCE_RIP();
4572 IEM_MC_END();
4573 }
4574 else
4575 {
4576 /* memory target */
4577 IEM_MC_BEGIN(0, 1);
4578 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4581 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4582 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4583 } IEM_MC_ELSE() {
4584 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4585 } IEM_MC_ENDIF();
4586 IEM_MC_ADVANCE_RIP();
4587 IEM_MC_END();
4588 }
4589 return VINF_SUCCESS;
4590}
4591
4592
4593/** Opcode 0x0f 0x95. */
4594FNIEMOP_DEF(iemOp_setne_Eb)
4595{
4596 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4597 IEMOP_HLP_MIN_386();
4598 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4599
4600 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4601 * any way. AMD says it's "unused", whatever that means. We're
4602 * ignoring for now. */
4603 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4604 {
4605 /* register target */
4606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4607 IEM_MC_BEGIN(0, 0);
4608 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4609 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4610 } IEM_MC_ELSE() {
4611 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4612 } IEM_MC_ENDIF();
4613 IEM_MC_ADVANCE_RIP();
4614 IEM_MC_END();
4615 }
4616 else
4617 {
4618 /* memory target */
4619 IEM_MC_BEGIN(0, 1);
4620 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4621 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4623 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4624 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4625 } IEM_MC_ELSE() {
4626 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4627 } IEM_MC_ENDIF();
4628 IEM_MC_ADVANCE_RIP();
4629 IEM_MC_END();
4630 }
4631 return VINF_SUCCESS;
4632}
4633
4634
4635/** Opcode 0x0f 0x96. */
4636FNIEMOP_DEF(iemOp_setbe_Eb)
4637{
4638 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4639 IEMOP_HLP_MIN_386();
4640 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4641
4642 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4643 * any way. AMD says it's "unused", whatever that means. We're
4644 * ignoring for now. */
4645 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4646 {
4647 /* register target */
4648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4649 IEM_MC_BEGIN(0, 0);
4650 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4651 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4652 } IEM_MC_ELSE() {
4653 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4654 } IEM_MC_ENDIF();
4655 IEM_MC_ADVANCE_RIP();
4656 IEM_MC_END();
4657 }
4658 else
4659 {
4660 /* memory target */
4661 IEM_MC_BEGIN(0, 1);
4662 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4663 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4664 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4665 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4666 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4667 } IEM_MC_ELSE() {
4668 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4669 } IEM_MC_ENDIF();
4670 IEM_MC_ADVANCE_RIP();
4671 IEM_MC_END();
4672 }
4673 return VINF_SUCCESS;
4674}
4675
4676
4677/** Opcode 0x0f 0x97. */
4678FNIEMOP_DEF(iemOp_setnbe_Eb)
4679{
4680 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4681 IEMOP_HLP_MIN_386();
4682 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4683
4684 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4685 * any way. AMD says it's "unused", whatever that means. We're
4686 * ignoring for now. */
4687 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4688 {
4689 /* register target */
4690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4691 IEM_MC_BEGIN(0, 0);
4692 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4693 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4694 } IEM_MC_ELSE() {
4695 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4696 } IEM_MC_ENDIF();
4697 IEM_MC_ADVANCE_RIP();
4698 IEM_MC_END();
4699 }
4700 else
4701 {
4702 /* memory target */
4703 IEM_MC_BEGIN(0, 1);
4704 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4705 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4707 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4708 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4709 } IEM_MC_ELSE() {
4710 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4711 } IEM_MC_ENDIF();
4712 IEM_MC_ADVANCE_RIP();
4713 IEM_MC_END();
4714 }
4715 return VINF_SUCCESS;
4716}
4717
4718
4719/** Opcode 0x0f 0x98. */
4720FNIEMOP_DEF(iemOp_sets_Eb)
4721{
4722 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
4723 IEMOP_HLP_MIN_386();
4724 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4725
4726 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4727 * any way. AMD says it's "unused", whatever that means. We're
4728 * ignoring for now. */
4729 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4730 {
4731 /* register target */
4732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4733 IEM_MC_BEGIN(0, 0);
4734 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4735 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4736 } IEM_MC_ELSE() {
4737 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4738 } IEM_MC_ENDIF();
4739 IEM_MC_ADVANCE_RIP();
4740 IEM_MC_END();
4741 }
4742 else
4743 {
4744 /* memory target */
4745 IEM_MC_BEGIN(0, 1);
4746 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4747 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4749 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4750 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4751 } IEM_MC_ELSE() {
4752 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4753 } IEM_MC_ENDIF();
4754 IEM_MC_ADVANCE_RIP();
4755 IEM_MC_END();
4756 }
4757 return VINF_SUCCESS;
4758}
4759
4760
4761/** Opcode 0x0f 0x99. */
4762FNIEMOP_DEF(iemOp_setns_Eb)
4763{
4764 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
4765 IEMOP_HLP_MIN_386();
4766 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4767
4768 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4769 * any way. AMD says it's "unused", whatever that means. We're
4770 * ignoring for now. */
4771 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4772 {
4773 /* register target */
4774 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4775 IEM_MC_BEGIN(0, 0);
4776 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4777 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4778 } IEM_MC_ELSE() {
4779 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4780 } IEM_MC_ENDIF();
4781 IEM_MC_ADVANCE_RIP();
4782 IEM_MC_END();
4783 }
4784 else
4785 {
4786 /* memory target */
4787 IEM_MC_BEGIN(0, 1);
4788 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4789 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4791 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4792 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4793 } IEM_MC_ELSE() {
4794 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4795 } IEM_MC_ENDIF();
4796 IEM_MC_ADVANCE_RIP();
4797 IEM_MC_END();
4798 }
4799 return VINF_SUCCESS;
4800}
4801
4802
4803/** Opcode 0x0f 0x9a. */
4804FNIEMOP_DEF(iemOp_setp_Eb)
4805{
4806 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
4807 IEMOP_HLP_MIN_386();
4808 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4809
4810 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4811 * any way. AMD says it's "unused", whatever that means. We're
4812 * ignoring for now. */
4813 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4814 {
4815 /* register target */
4816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4817 IEM_MC_BEGIN(0, 0);
4818 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4819 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4820 } IEM_MC_ELSE() {
4821 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4822 } IEM_MC_ENDIF();
4823 IEM_MC_ADVANCE_RIP();
4824 IEM_MC_END();
4825 }
4826 else
4827 {
4828 /* memory target */
4829 IEM_MC_BEGIN(0, 1);
4830 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4831 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4833 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4834 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4835 } IEM_MC_ELSE() {
4836 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4837 } IEM_MC_ENDIF();
4838 IEM_MC_ADVANCE_RIP();
4839 IEM_MC_END();
4840 }
4841 return VINF_SUCCESS;
4842}
4843
4844
4845/** Opcode 0x0f 0x9b. */
4846FNIEMOP_DEF(iemOp_setnp_Eb)
4847{
4848 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
4849 IEMOP_HLP_MIN_386();
4850 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4851
4852 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4853 * any way. AMD says it's "unused", whatever that means. We're
4854 * ignoring for now. */
4855 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4856 {
4857 /* register target */
4858 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4859 IEM_MC_BEGIN(0, 0);
4860 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4861 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4862 } IEM_MC_ELSE() {
4863 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4864 } IEM_MC_ENDIF();
4865 IEM_MC_ADVANCE_RIP();
4866 IEM_MC_END();
4867 }
4868 else
4869 {
4870 /* memory target */
4871 IEM_MC_BEGIN(0, 1);
4872 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4875 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4876 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4877 } IEM_MC_ELSE() {
4878 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4879 } IEM_MC_ENDIF();
4880 IEM_MC_ADVANCE_RIP();
4881 IEM_MC_END();
4882 }
4883 return VINF_SUCCESS;
4884}
4885
4886
4887/** Opcode 0x0f 0x9c. */
4888FNIEMOP_DEF(iemOp_setl_Eb)
4889{
4890 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
4891 IEMOP_HLP_MIN_386();
4892 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4893
4894 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4895 * any way. AMD says it's "unused", whatever that means. We're
4896 * ignoring for now. */
4897 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4898 {
4899 /* register target */
4900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4901 IEM_MC_BEGIN(0, 0);
4902 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4903 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4904 } IEM_MC_ELSE() {
4905 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4906 } IEM_MC_ENDIF();
4907 IEM_MC_ADVANCE_RIP();
4908 IEM_MC_END();
4909 }
4910 else
4911 {
4912 /* memory target */
4913 IEM_MC_BEGIN(0, 1);
4914 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4915 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4917 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4918 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4919 } IEM_MC_ELSE() {
4920 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4921 } IEM_MC_ENDIF();
4922 IEM_MC_ADVANCE_RIP();
4923 IEM_MC_END();
4924 }
4925 return VINF_SUCCESS;
4926}
4927
4928
4929/** Opcode 0x0f 0x9d. */
4930FNIEMOP_DEF(iemOp_setnl_Eb)
4931{
4932 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
4933 IEMOP_HLP_MIN_386();
4934 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4935
4936 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4937 * any way. AMD says it's "unused", whatever that means. We're
4938 * ignoring for now. */
4939 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4940 {
4941 /* register target */
4942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4943 IEM_MC_BEGIN(0, 0);
4944 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4945 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4946 } IEM_MC_ELSE() {
4947 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4948 } IEM_MC_ENDIF();
4949 IEM_MC_ADVANCE_RIP();
4950 IEM_MC_END();
4951 }
4952 else
4953 {
4954 /* memory target */
4955 IEM_MC_BEGIN(0, 1);
4956 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4957 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4959 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4960 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4961 } IEM_MC_ELSE() {
4962 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4963 } IEM_MC_ENDIF();
4964 IEM_MC_ADVANCE_RIP();
4965 IEM_MC_END();
4966 }
4967 return VINF_SUCCESS;
4968}
4969
4970
4971/** Opcode 0x0f 0x9e. */
4972FNIEMOP_DEF(iemOp_setle_Eb)
4973{
4974 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
4975 IEMOP_HLP_MIN_386();
4976 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4977
4978 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4979 * any way. AMD says it's "unused", whatever that means. We're
4980 * ignoring for now. */
4981 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4982 {
4983 /* register target */
4984 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4985 IEM_MC_BEGIN(0, 0);
4986 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4987 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4988 } IEM_MC_ELSE() {
4989 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4990 } IEM_MC_ENDIF();
4991 IEM_MC_ADVANCE_RIP();
4992 IEM_MC_END();
4993 }
4994 else
4995 {
4996 /* memory target */
4997 IEM_MC_BEGIN(0, 1);
4998 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4999 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5001 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5002 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5003 } IEM_MC_ELSE() {
5004 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5005 } IEM_MC_ENDIF();
5006 IEM_MC_ADVANCE_RIP();
5007 IEM_MC_END();
5008 }
5009 return VINF_SUCCESS;
5010}
5011
5012
5013/** Opcode 0x0f 0x9f. */
5014FNIEMOP_DEF(iemOp_setnle_Eb)
5015{
5016 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5017 IEMOP_HLP_MIN_386();
5018 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5019
5020 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5021 * any way. AMD says it's "unused", whatever that means. We're
5022 * ignoring for now. */
5023 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5024 {
5025 /* register target */
5026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5027 IEM_MC_BEGIN(0, 0);
5028 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5029 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5030 } IEM_MC_ELSE() {
5031 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5032 } IEM_MC_ENDIF();
5033 IEM_MC_ADVANCE_RIP();
5034 IEM_MC_END();
5035 }
5036 else
5037 {
5038 /* memory target */
5039 IEM_MC_BEGIN(0, 1);
5040 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5041 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5043 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5044 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5045 } IEM_MC_ELSE() {
5046 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5047 } IEM_MC_ENDIF();
5048 IEM_MC_ADVANCE_RIP();
5049 IEM_MC_END();
5050 }
5051 return VINF_SUCCESS;
5052}
5053
5054
5055/**
5056 * Common 'push segment-register' helper.
5057 */
5058FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5059{
5060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5061 if (iReg < X86_SREG_FS)
5062 IEMOP_HLP_NO_64BIT();
5063 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5064
5065 switch (pVCpu->iem.s.enmEffOpSize)
5066 {
5067 case IEMMODE_16BIT:
5068 IEM_MC_BEGIN(0, 1);
5069 IEM_MC_LOCAL(uint16_t, u16Value);
5070 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5071 IEM_MC_PUSH_U16(u16Value);
5072 IEM_MC_ADVANCE_RIP();
5073 IEM_MC_END();
5074 break;
5075
5076 case IEMMODE_32BIT:
5077 IEM_MC_BEGIN(0, 1);
5078 IEM_MC_LOCAL(uint32_t, u32Value);
5079 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
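     /* Note: IEM_MC_PUSH_U32_SREG rather than IEM_MC_PUSH_U32, since at
        least some CPUs reportedly write only the low word of the 32-bit
        stack slot when pushing a segment register. */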
5080 IEM_MC_PUSH_U32_SREG(u32Value);
5081 IEM_MC_ADVANCE_RIP();
5082 IEM_MC_END();
5083 break;
5084
5085 case IEMMODE_64BIT:
5086 IEM_MC_BEGIN(0, 1);
5087 IEM_MC_LOCAL(uint64_t, u64Value);
5088 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5089 IEM_MC_PUSH_U64(u64Value);
5090 IEM_MC_ADVANCE_RIP();
5091 IEM_MC_END();
5092 break;
5093 }
5094
5095 return VINF_SUCCESS;
5096}
5097
5098
5099/** Opcode 0x0f 0xa0. */
5100FNIEMOP_DEF(iemOp_push_fs)
5101{
5102 IEMOP_MNEMONIC(push_fs, "push fs");
5103 IEMOP_HLP_MIN_386();
5104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5105 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5106}
5107
5108
5109/** Opcode 0x0f 0xa1. */
5110FNIEMOP_DEF(iemOp_pop_fs)
5111{
5112 IEMOP_MNEMONIC(pop_fs, "pop fs");
5113 IEMOP_HLP_MIN_386();
5114 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5115 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5116}
5117
5118
5119/** Opcode 0x0f 0xa2. */
5120FNIEMOP_DEF(iemOp_cpuid)
5121{
5122 IEMOP_MNEMONIC(cpuid, "cpuid");
5123 IEMOP_HLP_MIN_486(); /* not all 486es. */
5124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5125 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5126}
5127
5128
5129/**
5130 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5131 * iemOp_bts_Ev_Gv.
5132 */
5133FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5134{
5135 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5136 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
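     /* For bt/bts/btr/btc, CF receives the value of the tested bit; the
        flags masked above are treated as undefined by the verifier. */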
5137
5138 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5139 {
5140 /* register destination. */
5141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5142 switch (pVCpu->iem.s.enmEffOpSize)
5143 {
5144 case IEMMODE_16BIT:
5145 IEM_MC_BEGIN(3, 0);
5146 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5147 IEM_MC_ARG(uint16_t, u16Src, 1);
5148 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5149
5150 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5151 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5152 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5153 IEM_MC_REF_EFLAGS(pEFlags);
5154 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5155
5156 IEM_MC_ADVANCE_RIP();
5157 IEM_MC_END();
5158 return VINF_SUCCESS;
5159
5160 case IEMMODE_32BIT:
5161 IEM_MC_BEGIN(3, 0);
5162 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5163 IEM_MC_ARG(uint32_t, u32Src, 1);
5164 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5165
5166 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5167 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5168 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5169 IEM_MC_REF_EFLAGS(pEFlags);
5170 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5171
5172 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5173 IEM_MC_ADVANCE_RIP();
5174 IEM_MC_END();
5175 return VINF_SUCCESS;
5176
5177 case IEMMODE_64BIT:
5178 IEM_MC_BEGIN(3, 0);
5179 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5180 IEM_MC_ARG(uint64_t, u64Src, 1);
5181 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5182
5183 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5184 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5185 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5186 IEM_MC_REF_EFLAGS(pEFlags);
5187 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5188
5189 IEM_MC_ADVANCE_RIP();
5190 IEM_MC_END();
5191 return VINF_SUCCESS;
5192
5193 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5194 }
5195 }
5196 else
5197 {
5198 /* memory destination. */
5199
5200 uint32_t fAccess;
5201 if (pImpl->pfnLockedU16)
5202 fAccess = IEM_ACCESS_DATA_RW;
5203 else /* BT */
5204 fAccess = IEM_ACCESS_DATA_R;
5205
5206 /** @todo test negative bit offsets! */
5207 switch (pVCpu->iem.s.enmEffOpSize)
5208 {
5209 case IEMMODE_16BIT:
5210 IEM_MC_BEGIN(3, 2);
5211 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5212 IEM_MC_ARG(uint16_t, u16Src, 1);
5213 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5214 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5215 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5216
5217 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5218 if (pImpl->pfnLockedU16)
5219 IEMOP_HLP_DONE_DECODING();
5220 else
5221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5222 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
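     /* Adjust the effective address: the bit offset also addresses memory,
        using signed (SAR) math so negative offsets reach below the base.
        Illustrative: bt word [mem],17 gives an adjustment of
        (17 >> 4) << 1 = 2 bytes and tests bit 17 & 0x0f = 1 of the word
        at [mem+2]. */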
5223 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5224 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5225 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5226 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5227 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5228 IEM_MC_FETCH_EFLAGS(EFlags);
5229
5230 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5231 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5232 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5233 else
5234 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5235 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5236
5237 IEM_MC_COMMIT_EFLAGS(EFlags);
5238 IEM_MC_ADVANCE_RIP();
5239 IEM_MC_END();
5240 return VINF_SUCCESS;
5241
5242 case IEMMODE_32BIT:
5243 IEM_MC_BEGIN(3, 2);
5244 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5245 IEM_MC_ARG(uint32_t, u32Src, 1);
5246 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5247 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5248 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5249
5250 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5251 if (pImpl->pfnLockedU16)
5252 IEMOP_HLP_DONE_DECODING();
5253 else
5254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5255 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5256 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5257 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5258 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5259 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5260 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5261 IEM_MC_FETCH_EFLAGS(EFlags);
5262
5263 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5264 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5265 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5266 else
5267 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5268 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5269
5270 IEM_MC_COMMIT_EFLAGS(EFlags);
5271 IEM_MC_ADVANCE_RIP();
5272 IEM_MC_END();
5273 return VINF_SUCCESS;
5274
5275 case IEMMODE_64BIT:
5276 IEM_MC_BEGIN(3, 2);
5277 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5278 IEM_MC_ARG(uint64_t, u64Src, 1);
5279 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5280 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5281 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5282
5283 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5284 if (pImpl->pfnLockedU16)
5285 IEMOP_HLP_DONE_DECODING();
5286 else
5287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5288 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5289 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5290 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5291 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5292 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5293 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5294 IEM_MC_FETCH_EFLAGS(EFlags);
5295
5296 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5297 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5298 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5299 else
5300 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5301 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5302
5303 IEM_MC_COMMIT_EFLAGS(EFlags);
5304 IEM_MC_ADVANCE_RIP();
5305 IEM_MC_END();
5306 return VINF_SUCCESS;
5307
5308 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5309 }
5310 }
5311}
5312
5313
5314/** Opcode 0x0f 0xa3. */
5315FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5316{
5317 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5318 IEMOP_HLP_MIN_386();
5319 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5320}
5321
5322
5323/**
5324 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5325 */
5326FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5327{
5328 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5329 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
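    /* AF is always undefined after SHLD/SHRD and OF is only defined for
       1-bit shifts, so the verifier is told to ignore both. */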
5330
5331 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5332 {
5333 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5335
5336 switch (pVCpu->iem.s.enmEffOpSize)
5337 {
5338 case IEMMODE_16BIT:
5339 IEM_MC_BEGIN(4, 0);
5340 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5341 IEM_MC_ARG(uint16_t, u16Src, 1);
5342 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5343 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5344
5345 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5346 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5347 IEM_MC_REF_EFLAGS(pEFlags);
5348 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5349
5350 IEM_MC_ADVANCE_RIP();
5351 IEM_MC_END();
5352 return VINF_SUCCESS;
5353
5354 case IEMMODE_32BIT:
5355 IEM_MC_BEGIN(4, 0);
5356 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5357 IEM_MC_ARG(uint32_t, u32Src, 1);
5358 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5359 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5360
5361 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5362 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5363 IEM_MC_REF_EFLAGS(pEFlags);
5364 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5365
5366 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5367 IEM_MC_ADVANCE_RIP();
5368 IEM_MC_END();
5369 return VINF_SUCCESS;
5370
5371 case IEMMODE_64BIT:
5372 IEM_MC_BEGIN(4, 0);
5373 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5374 IEM_MC_ARG(uint64_t, u64Src, 1);
5375 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5376 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5377
5378 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5379 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5380 IEM_MC_REF_EFLAGS(pEFlags);
5381 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5382
5383 IEM_MC_ADVANCE_RIP();
5384 IEM_MC_END();
5385 return VINF_SUCCESS;
5386
5387 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5388 }
5389 }
5390 else
5391 {
5392 switch (pVCpu->iem.s.enmEffOpSize)
5393 {
5394 case IEMMODE_16BIT:
5395 IEM_MC_BEGIN(4, 2);
5396 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5397 IEM_MC_ARG(uint16_t, u16Src, 1);
5398 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5399 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5400 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5401
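                /* Calculate the effective address before reading the
                   immediate; the trailing 1 tells IEM_MC_CALC_RM_EFF_ADDR
                   that one more opcode byte (the Ib) follows, which matters
                   for RIP-relative addressing. */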
5402 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5403 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5404 IEM_MC_ASSIGN(cShiftArg, cShift);
5405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5406 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5407 IEM_MC_FETCH_EFLAGS(EFlags);
5408 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5409 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5410
5411 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5412 IEM_MC_COMMIT_EFLAGS(EFlags);
5413 IEM_MC_ADVANCE_RIP();
5414 IEM_MC_END();
5415 return VINF_SUCCESS;
5416
5417 case IEMMODE_32BIT:
5418 IEM_MC_BEGIN(4, 2);
5419 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5420 IEM_MC_ARG(uint32_t, u32Src, 1);
5421 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5422 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5423 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5424
5425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5426 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5427 IEM_MC_ASSIGN(cShiftArg, cShift);
5428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5429 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5430 IEM_MC_FETCH_EFLAGS(EFlags);
5431 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5432 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5433
5434 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5435 IEM_MC_COMMIT_EFLAGS(EFlags);
5436 IEM_MC_ADVANCE_RIP();
5437 IEM_MC_END();
5438 return VINF_SUCCESS;
5439
5440 case IEMMODE_64BIT:
5441 IEM_MC_BEGIN(4, 2);
5442 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5443 IEM_MC_ARG(uint64_t, u64Src, 1);
5444 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5445 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5447
5448 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5449 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5450 IEM_MC_ASSIGN(cShiftArg, cShift);
5451 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5452 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5453 IEM_MC_FETCH_EFLAGS(EFlags);
5454 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5455 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5456
5457 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5458 IEM_MC_COMMIT_EFLAGS(EFlags);
5459 IEM_MC_ADVANCE_RIP();
5460 IEM_MC_END();
5461 return VINF_SUCCESS;
5462
5463 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5464 }
5465 }
5466}
5467
5468
5469/**
5470 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5471 */
5472FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5473{
5474 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5475 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5476
5477 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5478 {
5479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5480
5481 switch (pVCpu->iem.s.enmEffOpSize)
5482 {
5483 case IEMMODE_16BIT:
5484 IEM_MC_BEGIN(4, 0);
5485 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5486 IEM_MC_ARG(uint16_t, u16Src, 1);
5487 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5488 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5489
5490 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5491 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5492 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5493 IEM_MC_REF_EFLAGS(pEFlags);
5494 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5495
5496 IEM_MC_ADVANCE_RIP();
5497 IEM_MC_END();
5498 return VINF_SUCCESS;
5499
5500 case IEMMODE_32BIT:
5501 IEM_MC_BEGIN(4, 0);
5502 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5503 IEM_MC_ARG(uint32_t, u32Src, 1);
5504 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5505 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5506
5507 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5508 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5509 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5510 IEM_MC_REF_EFLAGS(pEFlags);
5511 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5512
5513 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5514 IEM_MC_ADVANCE_RIP();
5515 IEM_MC_END();
5516 return VINF_SUCCESS;
5517
5518 case IEMMODE_64BIT:
5519 IEM_MC_BEGIN(4, 0);
5520 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5521 IEM_MC_ARG(uint64_t, u64Src, 1);
5522 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5523 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5524
5525 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5526 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5527 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5528 IEM_MC_REF_EFLAGS(pEFlags);
5529 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5530
5531 IEM_MC_ADVANCE_RIP();
5532 IEM_MC_END();
5533 return VINF_SUCCESS;
5534
5535 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5536 }
5537 }
5538 else
5539 {
5540 switch (pVCpu->iem.s.enmEffOpSize)
5541 {
5542 case IEMMODE_16BIT:
5543 IEM_MC_BEGIN(4, 2);
5544 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5545 IEM_MC_ARG(uint16_t, u16Src, 1);
5546 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5547 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5548 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5549
5550 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5552 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5553 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5554 IEM_MC_FETCH_EFLAGS(EFlags);
5555 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5556 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5557
5558 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5559 IEM_MC_COMMIT_EFLAGS(EFlags);
5560 IEM_MC_ADVANCE_RIP();
5561 IEM_MC_END();
5562 return VINF_SUCCESS;
5563
5564 case IEMMODE_32BIT:
5565 IEM_MC_BEGIN(4, 2);
5566 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5567 IEM_MC_ARG(uint32_t, u32Src, 1);
5568 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5569 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5570 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5571
5572 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5574 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5575 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5576 IEM_MC_FETCH_EFLAGS(EFlags);
5577 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5578 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5579
5580 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5581 IEM_MC_COMMIT_EFLAGS(EFlags);
5582 IEM_MC_ADVANCE_RIP();
5583 IEM_MC_END();
5584 return VINF_SUCCESS;
5585
5586 case IEMMODE_64BIT:
5587 IEM_MC_BEGIN(4, 2);
5588 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5589 IEM_MC_ARG(uint64_t, u64Src, 1);
5590 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5591 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5592 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5593
5594 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5596 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5597 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5598 IEM_MC_FETCH_EFLAGS(EFlags);
5599 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5600 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5601
5602 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5603 IEM_MC_COMMIT_EFLAGS(EFlags);
5604 IEM_MC_ADVANCE_RIP();
5605 IEM_MC_END();
5606 return VINF_SUCCESS;
5607
5608 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5609 }
5610 }
5611}
5612
5613
5614
5615/** Opcode 0x0f 0xa4. */
5616FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5617{
5618 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5619 IEMOP_HLP_MIN_386();
5620 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5621}
5622
5623
5624/** Opcode 0x0f 0xa5. */
5625FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5626{
5627 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5628 IEMOP_HLP_MIN_386();
5629 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5630}
5631
5632
5633/** Opcode 0x0f 0xa8. */
5634FNIEMOP_DEF(iemOp_push_gs)
5635{
5636 IEMOP_MNEMONIC(push_gs, "push gs");
5637 IEMOP_HLP_MIN_386();
5638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5639 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5640}
5641
5642
5643/** Opcode 0x0f 0xa9. */
5644FNIEMOP_DEF(iemOp_pop_gs)
5645{
5646 IEMOP_MNEMONIC(pop_gs, "pop gs");
5647 IEMOP_HLP_MIN_386();
5648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5649 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5650}
5651
5652
5653/** Opcode 0x0f 0xaa. */
5654FNIEMOP_STUB(iemOp_rsm);
5655//IEMOP_HLP_MIN_386();
5656
5657
5658/** Opcode 0x0f 0xab. */
5659FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5660{
5661 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5662 IEMOP_HLP_MIN_386();
5663 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5664}
5665
5666
5667/** Opcode 0x0f 0xac. */
5668FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5669{
5670 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5671 IEMOP_HLP_MIN_386();
5672 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5673}
5674
5675
5676/** Opcode 0x0f 0xad. */
5677FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5678{
5679 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5680 IEMOP_HLP_MIN_386();
5681 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5682}
5683
5684
5685/** Opcode 0x0f 0xae mem/0. */
5686FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5687{
5688 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5689 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5690 return IEMOP_RAISE_INVALID_OPCODE();
5691
5692 IEM_MC_BEGIN(3, 1);
5693 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5694 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5695 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5696 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5698 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5699 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5700 IEM_MC_END();
5701 return VINF_SUCCESS;
5702}
5703
5704
5705/** Opcode 0x0f 0xae mem/1. */
5706FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5707{
5708 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5709 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5710 return IEMOP_RAISE_INVALID_OPCODE();
5711
5712 IEM_MC_BEGIN(3, 1);
5713 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5714 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5715 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5716 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5718 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5719 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5720 IEM_MC_END();
5721 return VINF_SUCCESS;
5722}
5723
5724
5725/** Opcode 0x0f 0xae mem/2. */
5726FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);
5727
5728/** Opcode 0x0f 0xae mem/3. */
5729FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);
5730
5731/** Opcode 0x0f 0xae mem/4. */
5732FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);
5733
5734/** Opcode 0x0f 0xae mem/5. */
5735FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);
5736
5737/** Opcode 0x0f 0xae mem/6. */
5738FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
5739
5740/** Opcode 0x0f 0xae mem/7. */
5741FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5742
5743
5744/** Opcode 0x0f 0xae 11b/5. */
5745FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
5746{
5747 RT_NOREF_PV(bRm);
5748 IEMOP_MNEMONIC(lfence, "lfence");
5749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5750 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5751 return IEMOP_RAISE_INVALID_OPCODE();
5752
5753 IEM_MC_BEGIN(0, 0);
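    /* Use the real LFENCE on hosts with SSE2; older hosts fall back to the
       alternative memory fence helper (same pattern for mfence and sfence
       below). */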
5754 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5755 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
5756 else
5757 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5758 IEM_MC_ADVANCE_RIP();
5759 IEM_MC_END();
5760 return VINF_SUCCESS;
5761}
5762
5763
5764/** Opcode 0x0f 0xae 11b/6. */
5765FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
5766{
5767 RT_NOREF_PV(bRm);
5768 IEMOP_MNEMONIC(mfence, "mfence");
5769 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5770 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5771 return IEMOP_RAISE_INVALID_OPCODE();
5772
5773 IEM_MC_BEGIN(0, 0);
5774 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5775 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
5776 else
5777 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5778 IEM_MC_ADVANCE_RIP();
5779 IEM_MC_END();
5780 return VINF_SUCCESS;
5781}
5782
5783
5784/** Opcode 0x0f 0xae 11b/7. */
5785FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
5786{
5787 RT_NOREF_PV(bRm);
5788 IEMOP_MNEMONIC(sfence, "sfence");
5789 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5790 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5791 return IEMOP_RAISE_INVALID_OPCODE();
5792
5793 IEM_MC_BEGIN(0, 0);
5794 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5795 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
5796 else
5797 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5798 IEM_MC_ADVANCE_RIP();
5799 IEM_MC_END();
5800 return VINF_SUCCESS;
5801}
5802
5803
5804/** Opcode 0xf3 0x0f 0xae 11b/0. */
5805FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
5806
5807/** Opcode 0xf3 0x0f 0xae 11b/1. */
5808FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
5809
5810/** Opcode 0xf3 0x0f 0xae 11b/2. */
5811FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
5812
5813/** Opcode 0xf3 0x0f 0xae 11b/3. */
5814FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5815
5816
5817/** Opcode 0x0f 0xae. */
5818FNIEMOP_DEF(iemOp_Grp15)
5819{
5820 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
5821 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5822 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
5823 {
5824 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5825 {
5826 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
5827 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
5828 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
5829 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
5830 case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
5831 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
5832 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
5833 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
5834 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5835 }
5836 }
5837 else
5838 {
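        /* The register forms are distinguished by their mandatory prefixes:
           with no prefix only the fences (/5../7) are valid, F3 (REPZ)
           selects the fs/gs base accessors, and anything else is #UD. */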
5839 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
5840 {
5841 case 0:
5842 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5843 {
5844 case 0: return IEMOP_RAISE_INVALID_OPCODE();
5845 case 1: return IEMOP_RAISE_INVALID_OPCODE();
5846 case 2: return IEMOP_RAISE_INVALID_OPCODE();
5847 case 3: return IEMOP_RAISE_INVALID_OPCODE();
5848 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5849 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
5850 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
5851 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
5852 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5853 }
5854 break;
5855
5856 case IEM_OP_PRF_REPZ:
5857 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5858 {
5859 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
5860 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
5861 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
5862 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
5863 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5864 case 5: return IEMOP_RAISE_INVALID_OPCODE();
5865 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5866 case 7: return IEMOP_RAISE_INVALID_OPCODE();
5867 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5868 }
5869 break;
5870
5871 default:
5872 return IEMOP_RAISE_INVALID_OPCODE();
5873 }
5874 }
5875}
5876
5877
5878/** Opcode 0x0f 0xaf. */
5879FNIEMOP_DEF(iemOp_imul_Gv_Ev)
5880{
5881 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
5882 IEMOP_HLP_MIN_386();
5883 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5884 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
5885}
5886
5887
5888/** Opcode 0x0f 0xb0. */
5889FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
5890{
5891 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
5892 IEMOP_HLP_MIN_486();
5893 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5894
5895 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5896 {
5897 IEMOP_HLP_DONE_DECODING();
5898 IEM_MC_BEGIN(4, 0);
5899 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5900 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5901 IEM_MC_ARG(uint8_t, u8Src, 2);
5902 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5903
5904 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5905 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5906 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
5907 IEM_MC_REF_EFLAGS(pEFlags);
5908 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5909 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5910 else
5911 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5912
5913 IEM_MC_ADVANCE_RIP();
5914 IEM_MC_END();
5915 }
5916 else
5917 {
5918 IEM_MC_BEGIN(4, 3);
5919 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5920 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5921 IEM_MC_ARG(uint8_t, u8Src, 2);
5922 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5923 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5924 IEM_MC_LOCAL(uint8_t, u8Al);
5925
5926 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5927 IEMOP_HLP_DONE_DECODING();
5928 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5929 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5930 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
5931 IEM_MC_FETCH_EFLAGS(EFlags);
5932 IEM_MC_REF_LOCAL(pu8Al, u8Al);
5933 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5934 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5935 else
5936 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5937
5938 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
5939 IEM_MC_COMMIT_EFLAGS(EFlags);
5940 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
5941 IEM_MC_ADVANCE_RIP();
5942 IEM_MC_END();
5943 }
5944 return VINF_SUCCESS;
5945}
5946
5947/** Opcode 0x0f 0xb1. */
5948FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
5949{
5950 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
5951 IEMOP_HLP_MIN_486();
5952 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5953
5954 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5955 {
5956 IEMOP_HLP_DONE_DECODING();
5957 switch (pVCpu->iem.s.enmEffOpSize)
5958 {
5959 case IEMMODE_16BIT:
5960 IEM_MC_BEGIN(4, 0);
5961 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5962 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5963 IEM_MC_ARG(uint16_t, u16Src, 2);
5964 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5965
5966 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5967 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5968 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
5969 IEM_MC_REF_EFLAGS(pEFlags);
5970 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5971 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5972 else
5973 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5974
5975 IEM_MC_ADVANCE_RIP();
5976 IEM_MC_END();
5977 return VINF_SUCCESS;
5978
5979 case IEMMODE_32BIT:
5980 IEM_MC_BEGIN(4, 0);
5981 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5982 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5983 IEM_MC_ARG(uint32_t, u32Src, 2);
5984 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5985
5986 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5987 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5988 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
5989 IEM_MC_REF_EFLAGS(pEFlags);
5990 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5991 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5992 else
5993 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5994
5995 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
5996 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5997 IEM_MC_ADVANCE_RIP();
5998 IEM_MC_END();
5999 return VINF_SUCCESS;
6000
6001 case IEMMODE_64BIT:
6002 IEM_MC_BEGIN(4, 0);
6003 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6004 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
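                /* On 32-bit (x86) hosts the 64-bit source operand is passed
                   by reference, presumably so the assembly helper need not
                   take a 64-bit argument by value. */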
6005#ifdef RT_ARCH_X86
6006 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6007#else
6008 IEM_MC_ARG(uint64_t, u64Src, 2);
6009#endif
6010 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6011
6012 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6013 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
6014 IEM_MC_REF_EFLAGS(pEFlags);
6015#ifdef RT_ARCH_X86
6016 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6017 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6018 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6019 else
6020 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6021#else
6022 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6023 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6024 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6025 else
6026 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6027#endif
6028
6029 IEM_MC_ADVANCE_RIP();
6030 IEM_MC_END();
6031 return VINF_SUCCESS;
6032
6033 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6034 }
6035 }
6036 else
6037 {
6038 switch (pVCpu->iem.s.enmEffOpSize)
6039 {
6040 case IEMMODE_16BIT:
6041 IEM_MC_BEGIN(4, 3);
6042 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6043 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6044 IEM_MC_ARG(uint16_t, u16Src, 2);
6045 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6046 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6047 IEM_MC_LOCAL(uint16_t, u16Ax);
6048
6049 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6050 IEMOP_HLP_DONE_DECODING();
6051 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6052 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6053 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
6054 IEM_MC_FETCH_EFLAGS(EFlags);
6055 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
6056 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6057 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6058 else
6059 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6060
6061 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6062 IEM_MC_COMMIT_EFLAGS(EFlags);
6063 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
6064 IEM_MC_ADVANCE_RIP();
6065 IEM_MC_END();
6066 return VINF_SUCCESS;
6067
6068 case IEMMODE_32BIT:
6069 IEM_MC_BEGIN(4, 3);
6070 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6071 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6072 IEM_MC_ARG(uint32_t, u32Src, 2);
6073 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6074 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6075 IEM_MC_LOCAL(uint32_t, u32Eax);
6076
6077 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6078 IEMOP_HLP_DONE_DECODING();
6079 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6080 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6081 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
6082 IEM_MC_FETCH_EFLAGS(EFlags);
6083 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
6084 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6085 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6086 else
6087 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6088
6089 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6090 IEM_MC_COMMIT_EFLAGS(EFlags);
6091 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
6092 IEM_MC_ADVANCE_RIP();
6093 IEM_MC_END();
6094 return VINF_SUCCESS;
6095
6096 case IEMMODE_64BIT:
6097 IEM_MC_BEGIN(4, 3);
6098 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6099 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6100#ifdef RT_ARCH_X86
6101 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6102#else
6103 IEM_MC_ARG(uint64_t, u64Src, 2);
6104#endif
6105 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6106 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6107 IEM_MC_LOCAL(uint64_t, u64Rax);
6108
6109 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6110 IEMOP_HLP_DONE_DECODING();
6111 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6112 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
6113 IEM_MC_FETCH_EFLAGS(EFlags);
6114 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
6115#ifdef RT_ARCH_X86
6116 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6117 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6118 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6119 else
6120 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6121#else
6122 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6123 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6124 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6125 else
6126 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6127#endif
6128
6129 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6130 IEM_MC_COMMIT_EFLAGS(EFlags);
6131 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
6132 IEM_MC_ADVANCE_RIP();
6133 IEM_MC_END();
6134 return VINF_SUCCESS;
6135
6136 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6137 }
6138 }
6139}
6140
6141
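/**
 * Common worker for lss, lfs and lgs: loads a far pointer from memory (the
 * offset followed by a 16-bit selector) into iSegReg and the general
 * register addressed by bRm.
 */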
6142FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
6143{
6144 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
6145 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
6146
6147 switch (pVCpu->iem.s.enmEffOpSize)
6148 {
6149 case IEMMODE_16BIT:
6150 IEM_MC_BEGIN(5, 1);
6151 IEM_MC_ARG(uint16_t, uSel, 0);
6152 IEM_MC_ARG(uint16_t, offSeg, 1);
6153 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6154 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6155 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6156 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6157 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6159 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6160 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
6161 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6162 IEM_MC_END();
6163 return VINF_SUCCESS;
6164
6165 case IEMMODE_32BIT:
6166 IEM_MC_BEGIN(5, 1);
6167 IEM_MC_ARG(uint16_t, uSel, 0);
6168 IEM_MC_ARG(uint32_t, offSeg, 1);
6169 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6170 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6171 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6172 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6173 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6175 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6176 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
6177 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6178 IEM_MC_END();
6179 return VINF_SUCCESS;
6180
6181 case IEMMODE_64BIT:
6182 IEM_MC_BEGIN(5, 1);
6183 IEM_MC_ARG(uint16_t, uSel, 0);
6184 IEM_MC_ARG(uint64_t, offSeg, 1);
6185 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6186 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6187 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6188 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6189 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6191 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manuals claims it only loads a 32-bit greg. */
6192 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6193 else
6194 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6195 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
6196 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6197 IEM_MC_END();
6198 return VINF_SUCCESS;
6199
6200 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6201 }
6202}
6203
6204
6205/** Opcode 0x0f 0xb2. */
6206FNIEMOP_DEF(iemOp_lss_Gv_Mp)
6207{
6208 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
6209 IEMOP_HLP_MIN_386();
6210 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6211 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6212 return IEMOP_RAISE_INVALID_OPCODE();
6213 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
6214}
6215
6216
6217/** Opcode 0x0f 0xb3. */
6218FNIEMOP_DEF(iemOp_btr_Ev_Gv)
6219{
6220 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
6221 IEMOP_HLP_MIN_386();
6222 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
6223}
6224
6225
6226/** Opcode 0x0f 0xb4. */
6227FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
6228{
6229 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
6230 IEMOP_HLP_MIN_386();
6231 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6232 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6233 return IEMOP_RAISE_INVALID_OPCODE();
6234 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
6235}
6236
6237
6238/** Opcode 0x0f 0xb5. */
6239FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
6240{
6241 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
6242 IEMOP_HLP_MIN_386();
6243 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6244 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6245 return IEMOP_RAISE_INVALID_OPCODE();
6246 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
6247}
6248
6249
6250/** Opcode 0x0f 0xb6. */
6251FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
6252{
6253 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
6254 IEMOP_HLP_MIN_386();
6255
6256 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6257
6258 /*
6259 * If rm is denoting a register, no more instruction bytes.
6260 */
6261 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6262 {
6263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6264 switch (pVCpu->iem.s.enmEffOpSize)
6265 {
6266 case IEMMODE_16BIT:
6267 IEM_MC_BEGIN(0, 1);
6268 IEM_MC_LOCAL(uint16_t, u16Value);
6269 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6270 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6271 IEM_MC_ADVANCE_RIP();
6272 IEM_MC_END();
6273 return VINF_SUCCESS;
6274
6275 case IEMMODE_32BIT:
6276 IEM_MC_BEGIN(0, 1);
6277 IEM_MC_LOCAL(uint32_t, u32Value);
6278 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6279 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6280 IEM_MC_ADVANCE_RIP();
6281 IEM_MC_END();
6282 return VINF_SUCCESS;
6283
6284 case IEMMODE_64BIT:
6285 IEM_MC_BEGIN(0, 1);
6286 IEM_MC_LOCAL(uint64_t, u64Value);
6287 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6288 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6289 IEM_MC_ADVANCE_RIP();
6290 IEM_MC_END();
6291 return VINF_SUCCESS;
6292
6293 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6294 }
6295 }
6296 else
6297 {
6298 /*
6299 * We're loading a register from memory.
6300 */
6301 switch (pVCpu->iem.s.enmEffOpSize)
6302 {
6303 case IEMMODE_16BIT:
6304 IEM_MC_BEGIN(0, 2);
6305 IEM_MC_LOCAL(uint16_t, u16Value);
6306 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6307 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6309 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6310 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6311 IEM_MC_ADVANCE_RIP();
6312 IEM_MC_END();
6313 return VINF_SUCCESS;
6314
6315 case IEMMODE_32BIT:
6316 IEM_MC_BEGIN(0, 2);
6317 IEM_MC_LOCAL(uint32_t, u32Value);
6318 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6321 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6322 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6323 IEM_MC_ADVANCE_RIP();
6324 IEM_MC_END();
6325 return VINF_SUCCESS;
6326
6327 case IEMMODE_64BIT:
6328 IEM_MC_BEGIN(0, 2);
6329 IEM_MC_LOCAL(uint64_t, u64Value);
6330 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6331 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6333 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6334 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6335 IEM_MC_ADVANCE_RIP();
6336 IEM_MC_END();
6337 return VINF_SUCCESS;
6338
6339 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6340 }
6341 }
6342}
6343
6344
6345/** Opcode 0x0f 0xb7. */
6346FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6347{
6348 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6349 IEMOP_HLP_MIN_386();
6350
6351 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6352
6353 /** @todo Not entirely sure how the operand size prefix is handled here,
6354 * assuming that it will be ignored. It would be nice to have a few
6355 * tests for this. */
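    /* (The code below treats any non-64-bit effective operand size as a
       32-bit destination, i.e. a 66h prefix is effectively ignored.) */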
6356 /*
6357 * If rm is denoting a register, no more instruction bytes.
6358 */
6359 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6360 {
6361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6362 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6363 {
6364 IEM_MC_BEGIN(0, 1);
6365 IEM_MC_LOCAL(uint32_t, u32Value);
6366 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6367 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6368 IEM_MC_ADVANCE_RIP();
6369 IEM_MC_END();
6370 }
6371 else
6372 {
6373 IEM_MC_BEGIN(0, 1);
6374 IEM_MC_LOCAL(uint64_t, u64Value);
6375 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6376 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6377 IEM_MC_ADVANCE_RIP();
6378 IEM_MC_END();
6379 }
6380 }
6381 else
6382 {
6383 /*
6384 * We're loading a register from memory.
6385 */
6386 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6387 {
6388 IEM_MC_BEGIN(0, 2);
6389 IEM_MC_LOCAL(uint32_t, u32Value);
6390 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6391 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6392 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6393 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6394 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6395 IEM_MC_ADVANCE_RIP();
6396 IEM_MC_END();
6397 }
6398 else
6399 {
6400 IEM_MC_BEGIN(0, 2);
6401 IEM_MC_LOCAL(uint64_t, u64Value);
6402 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6403 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6405 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6406 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6407 IEM_MC_ADVANCE_RIP();
6408 IEM_MC_END();
6409 }
6410 }
6411 return VINF_SUCCESS;
6412}
6413
6414
6415/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
6416FNIEMOP_UD_STUB(iemOp_jmpe);
6417/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
6418FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6419
6420
6421/** Opcode 0x0f 0xb9. */
6422FNIEMOP_DEF(iemOp_Grp10)
6423{
6424 Log(("iemOp_Grp10 -> #UD\n"));
6425 return IEMOP_RAISE_INVALID_OPCODE();
6426}
6427
6428
6429/** Opcode 0x0f 0xba. */
6430FNIEMOP_DEF(iemOp_Grp8)
6431{
6432 IEMOP_HLP_MIN_386();
6433 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6434 PCIEMOPBINSIZES pImpl;
6435 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6436 {
6437 case 0: case 1: case 2: case 3:
6438 return IEMOP_RAISE_INVALID_OPCODE();
6439 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6440 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6441 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6442 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6443 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6444 }
6445 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
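    /* Note that unlike the Gv forms, the immediate bit offset is simply
       taken modulo the operand width and never adjusts the effective
       address. */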
6446
6447 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6448 {
6449 /* register destination. */
6450 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6451 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6452
6453 switch (pVCpu->iem.s.enmEffOpSize)
6454 {
6455 case IEMMODE_16BIT:
6456 IEM_MC_BEGIN(3, 0);
6457 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6458 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6459 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6460
6461 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6462 IEM_MC_REF_EFLAGS(pEFlags);
6463 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6464
6465 IEM_MC_ADVANCE_RIP();
6466 IEM_MC_END();
6467 return VINF_SUCCESS;
6468
6469 case IEMMODE_32BIT:
6470 IEM_MC_BEGIN(3, 0);
6471 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6472 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6473 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6474
6475 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6476 IEM_MC_REF_EFLAGS(pEFlags);
6477 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6478
6479 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6480 IEM_MC_ADVANCE_RIP();
6481 IEM_MC_END();
6482 return VINF_SUCCESS;
6483
6484 case IEMMODE_64BIT:
6485 IEM_MC_BEGIN(3, 0);
6486 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6487 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6488 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6489
6490 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6491 IEM_MC_REF_EFLAGS(pEFlags);
6492 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6493
6494 IEM_MC_ADVANCE_RIP();
6495 IEM_MC_END();
6496 return VINF_SUCCESS;
6497
6498 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6499 }
6500 }
6501 else
6502 {
6503 /* memory destination. */
6504
6505 uint32_t fAccess;
6506 if (pImpl->pfnLockedU16)
6507 fAccess = IEM_ACCESS_DATA_RW;
6508 else /* BT */
6509 fAccess = IEM_ACCESS_DATA_R;
6510
6511 /** @todo test negative bit offsets! */
6512 switch (pVCpu->iem.s.enmEffOpSize)
6513 {
6514 case IEMMODE_16BIT:
6515 IEM_MC_BEGIN(3, 1);
6516 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6517 IEM_MC_ARG(uint16_t, u16Src, 1);
6518 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6519 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6520
6521 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6522 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6523 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6524 if (pImpl->pfnLockedU16)
6525 IEMOP_HLP_DONE_DECODING();
6526 else
6527 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6528 IEM_MC_FETCH_EFLAGS(EFlags);
6529 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6530 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6531 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6532 else
6533 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6534 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6535
6536 IEM_MC_COMMIT_EFLAGS(EFlags);
6537 IEM_MC_ADVANCE_RIP();
6538 IEM_MC_END();
6539 return VINF_SUCCESS;
6540
6541 case IEMMODE_32BIT:
6542 IEM_MC_BEGIN(3, 1);
6543 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6544 IEM_MC_ARG(uint32_t, u32Src, 1);
6545 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6546 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6547
6548 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6549 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6550 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6551 if (pImpl->pfnLockedU16)
6552 IEMOP_HLP_DONE_DECODING();
6553 else
6554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6555 IEM_MC_FETCH_EFLAGS(EFlags);
6556 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6557 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6558 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6559 else
6560 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6561 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6562
6563 IEM_MC_COMMIT_EFLAGS(EFlags);
6564 IEM_MC_ADVANCE_RIP();
6565 IEM_MC_END();
6566 return VINF_SUCCESS;
6567
6568 case IEMMODE_64BIT:
6569 IEM_MC_BEGIN(3, 1);
6570 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6571 IEM_MC_ARG(uint64_t, u64Src, 1);
6572 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6573 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6574
6575 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6576 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6577 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6578 if (pImpl->pfnLockedU16)
6579 IEMOP_HLP_DONE_DECODING();
6580 else
6581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6582 IEM_MC_FETCH_EFLAGS(EFlags);
6583 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6584 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6585 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6586 else
6587 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6588 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6589
6590 IEM_MC_COMMIT_EFLAGS(EFlags);
6591 IEM_MC_ADVANCE_RIP();
6592 IEM_MC_END();
6593 return VINF_SUCCESS;
6594
6595 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6596 }
6597 }
6598
6599}
6600
6601
6602/** Opcode 0x0f 0xbb. */
6603FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6604{
6605 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
6606 IEMOP_HLP_MIN_386();
6607 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6608}
6609
6610
6611/** Opcode 0x0f 0xbc. */
6612FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6613{
6614 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
6615 IEMOP_HLP_MIN_386();
6616 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6617 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6618}
6619
6620
6621/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
6622FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
6623
6624
6625/** Opcode 0x0f 0xbd. */
6626FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6627{
6628 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
6629 IEMOP_HLP_MIN_386();
6630 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6631 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6632}
6633
6634
6635/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
6636FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
6637
6638
6639/** Opcode 0x0f 0xbe. */
6640FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
6641{
6642 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
6643 IEMOP_HLP_MIN_386();
6644
6645 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6646
6647 /*
6648 * If rm is denoting a register, no more instruction bytes.
6649 */
6650 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6651 {
6652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6653 switch (pVCpu->iem.s.enmEffOpSize)
6654 {
6655 case IEMMODE_16BIT:
6656 IEM_MC_BEGIN(0, 1);
6657 IEM_MC_LOCAL(uint16_t, u16Value);
6658 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6659 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6660 IEM_MC_ADVANCE_RIP();
6661 IEM_MC_END();
6662 return VINF_SUCCESS;
6663
6664 case IEMMODE_32BIT:
6665 IEM_MC_BEGIN(0, 1);
6666 IEM_MC_LOCAL(uint32_t, u32Value);
6667 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6668 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6669 IEM_MC_ADVANCE_RIP();
6670 IEM_MC_END();
6671 return VINF_SUCCESS;
6672
6673 case IEMMODE_64BIT:
6674 IEM_MC_BEGIN(0, 1);
6675 IEM_MC_LOCAL(uint64_t, u64Value);
6676 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6677 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6678 IEM_MC_ADVANCE_RIP();
6679 IEM_MC_END();
6680 return VINF_SUCCESS;
6681
6682 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6683 }
6684 }
6685 else
6686 {
6687 /*
6688 * We're loading a register from memory.
6689 */
6690 switch (pVCpu->iem.s.enmEffOpSize)
6691 {
6692 case IEMMODE_16BIT:
6693 IEM_MC_BEGIN(0, 2);
6694 IEM_MC_LOCAL(uint16_t, u16Value);
6695 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6696 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6698 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6699 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6700 IEM_MC_ADVANCE_RIP();
6701 IEM_MC_END();
6702 return VINF_SUCCESS;
6703
6704 case IEMMODE_32BIT:
6705 IEM_MC_BEGIN(0, 2);
6706 IEM_MC_LOCAL(uint32_t, u32Value);
6707 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6708 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6710 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6711 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6712 IEM_MC_ADVANCE_RIP();
6713 IEM_MC_END();
6714 return VINF_SUCCESS;
6715
6716 case IEMMODE_64BIT:
6717 IEM_MC_BEGIN(0, 2);
6718 IEM_MC_LOCAL(uint64_t, u64Value);
6719 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6720 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6722 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6723 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6724 IEM_MC_ADVANCE_RIP();
6725 IEM_MC_END();
6726 return VINF_SUCCESS;
6727
6728 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6729 }
6730 }
6731}
6732
6733
6734/** Opcode 0x0f 0xbf. */
6735FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
6736{
6737 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
6738 IEMOP_HLP_MIN_386();
6739
6740 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6741
6742 /** @todo Not entirely sure how the operand size prefix is handled here,
6743 * assuming that it will be ignored. It would be nice to have a few
6744 * tests for this. */
6745 /*
6746 * If rm is denoting a register, no more instruction bytes.
6747 */
6748 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6749 {
6750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6751 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6752 {
6753 IEM_MC_BEGIN(0, 1);
6754 IEM_MC_LOCAL(uint32_t, u32Value);
6755 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6756 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6757 IEM_MC_ADVANCE_RIP();
6758 IEM_MC_END();
6759 }
6760 else
6761 {
6762 IEM_MC_BEGIN(0, 1);
6763 IEM_MC_LOCAL(uint64_t, u64Value);
6764 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6765 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6766 IEM_MC_ADVANCE_RIP();
6767 IEM_MC_END();
6768 }
6769 }
6770 else
6771 {
6772 /*
6773 * We're loading a register from memory.
6774 */
6775 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6776 {
6777 IEM_MC_BEGIN(0, 2);
6778 IEM_MC_LOCAL(uint32_t, u32Value);
6779 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6782 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6783 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6784 IEM_MC_ADVANCE_RIP();
6785 IEM_MC_END();
6786 }
6787 else
6788 {
6789 IEM_MC_BEGIN(0, 2);
6790 IEM_MC_LOCAL(uint64_t, u64Value);
6791 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6792 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6794 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6795 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6796 IEM_MC_ADVANCE_RIP();
6797 IEM_MC_END();
6798 }
6799 }
6800 return VINF_SUCCESS;
6801}
6802
6803
6804/** Opcode 0x0f 0xc0. */
6805FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
6806{
6807 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6808 IEMOP_HLP_MIN_486();
6809 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
6810
6811 /*
6812 * If rm is denoting a register, no more instruction bytes.
6813 */
6814 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6815 {
6816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6817
6818 IEM_MC_BEGIN(3, 0);
6819 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6820 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6821 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6822
6823 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6824 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6825 IEM_MC_REF_EFLAGS(pEFlags);
6826 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6827
6828 IEM_MC_ADVANCE_RIP();
6829 IEM_MC_END();
6830 }
6831 else
6832 {
6833 /*
6834 * We're accessing memory.
6835 */
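        /* XADD returns the old destination value in the register operand,
           so work on a copy of the register and only write it back once the
           memory operand has been committed. */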
6836 IEM_MC_BEGIN(3, 3);
6837 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6838 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6839 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6840 IEM_MC_LOCAL(uint8_t, u8RegCopy);
6841 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6842
6843 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6844 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6845 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6846 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
6847 IEM_MC_FETCH_EFLAGS(EFlags);
6848 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6849 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6850 else
6851 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
6852
6853 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6854 IEM_MC_COMMIT_EFLAGS(EFlags);
6855 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
6856 IEM_MC_ADVANCE_RIP();
6857 IEM_MC_END();
6858 return VINF_SUCCESS;
6859 }
6860 return VINF_SUCCESS;
6861}
6862
6863
6864/** Opcode 0x0f 0xc1. */
6865FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
6866{
6867 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
6868 IEMOP_HLP_MIN_486();
6869 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6870
6871 /*
6872 * If rm is denoting a register, no more instruction bytes.
6873 */
6874 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6875 {
6876 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6877
6878 switch (pVCpu->iem.s.enmEffOpSize)
6879 {
6880 case IEMMODE_16BIT:
6881 IEM_MC_BEGIN(3, 0);
6882 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6883 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6884 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6885
6886 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6887 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6888 IEM_MC_REF_EFLAGS(pEFlags);
6889 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6890
6891 IEM_MC_ADVANCE_RIP();
6892 IEM_MC_END();
6893 return VINF_SUCCESS;
6894
6895 case IEMMODE_32BIT:
6896 IEM_MC_BEGIN(3, 0);
6897 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6898 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6899 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6900
6901 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6902 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6903 IEM_MC_REF_EFLAGS(pEFlags);
6904 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6905
6906 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6907 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
6908 IEM_MC_ADVANCE_RIP();
6909 IEM_MC_END();
6910 return VINF_SUCCESS;
6911
6912 case IEMMODE_64BIT:
6913 IEM_MC_BEGIN(3, 0);
6914 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6915 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6916 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6917
6918 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6919 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6920 IEM_MC_REF_EFLAGS(pEFlags);
6921 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6922
6923 IEM_MC_ADVANCE_RIP();
6924 IEM_MC_END();
6925 return VINF_SUCCESS;
6926
6927 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6928 }
6929 }
6930 else
6931 {
6932 /*
6933 * We're accessing memory.
6934 */
6935 switch (pVCpu->iem.s.enmEffOpSize)
6936 {
6937 case IEMMODE_16BIT:
6938 IEM_MC_BEGIN(3, 3);
6939 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6940 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6941 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6942 IEM_MC_LOCAL(uint16_t, u16RegCopy);
6943 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6944
6945 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6946 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6947 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6948 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
6949 IEM_MC_FETCH_EFLAGS(EFlags);
6950 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6951 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6952 else
6953 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
6954
6955 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6956 IEM_MC_COMMIT_EFLAGS(EFlags);
6957 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
6958 IEM_MC_ADVANCE_RIP();
6959 IEM_MC_END();
6960 return VINF_SUCCESS;
6961
6962 case IEMMODE_32BIT:
6963 IEM_MC_BEGIN(3, 3);
6964 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6965 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6966 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6967 IEM_MC_LOCAL(uint32_t, u32RegCopy);
6968 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6969
6970 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6971 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6972 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6973 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
6974 IEM_MC_FETCH_EFLAGS(EFlags);
6975 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6976 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6977 else
6978 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
6979
6980 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6981 IEM_MC_COMMIT_EFLAGS(EFlags);
6982 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
6983 IEM_MC_ADVANCE_RIP();
6984 IEM_MC_END();
6985 return VINF_SUCCESS;
6986
6987 case IEMMODE_64BIT:
6988 IEM_MC_BEGIN(3, 3);
6989 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6990 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6991 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6992 IEM_MC_LOCAL(uint64_t, u64RegCopy);
6993 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6994
6995 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6996 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6997 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6998 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
6999 IEM_MC_FETCH_EFLAGS(EFlags);
7000 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7001 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7002 else
7003 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
7004
7005 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7006 IEM_MC_COMMIT_EFLAGS(EFlags);
7007 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
7008 IEM_MC_ADVANCE_RIP();
7009 IEM_MC_END();
7010 return VINF_SUCCESS;
7011
7012 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7013 }
7014 }
7015}
7016
7017
7018/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
7019FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
7020/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
7021FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
7022/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
7023FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
7024/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
7025FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
7026
7027
7028/** Opcode 0x0f 0xc3. */
7029FNIEMOP_DEF(iemOp_movnti_My_Gy)
7030{
7031 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
7032
7033 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7034
7035 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
7036 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7037 {
7038 switch (pVCpu->iem.s.enmEffOpSize)
7039 {
7040 case IEMMODE_32BIT:
7041 IEM_MC_BEGIN(0, 2);
7042 IEM_MC_LOCAL(uint32_t, u32Value);
7043 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7044
7045 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7047 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7048 return IEMOP_RAISE_INVALID_OPCODE();
7049
7050 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7051 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
7052 IEM_MC_ADVANCE_RIP();
7053 IEM_MC_END();
7054 break;
7055
7056 case IEMMODE_64BIT:
7057 IEM_MC_BEGIN(0, 2);
7058 IEM_MC_LOCAL(uint64_t, u64Value);
7059 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7060
7061 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7063 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7064 return IEMOP_RAISE_INVALID_OPCODE();
7065
7066 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7067 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7068 IEM_MC_ADVANCE_RIP();
7069 IEM_MC_END();
7070 break;
7071
7072 case IEMMODE_16BIT:
7073 /** @todo check this form. */
7074 return IEMOP_RAISE_INVALID_OPCODE();
7075 }
7076 }
7077 else
7078 return IEMOP_RAISE_INVALID_OPCODE();
7079 return VINF_SUCCESS;
7080}
7081/* Opcode 0x66 0x0f 0xc3 - invalid */
7082/* Opcode 0xf3 0x0f 0xc3 - invalid */
7083/* Opcode 0xf2 0x0f 0xc3 - invalid */
7084
7085/** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */
7086FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7087/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
7088FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
7089/* Opcode 0xf3 0x0f 0xc4 - invalid */
7090/* Opcode 0xf2 0x0f 0xc4 - invalid */
7091
7092/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
7093FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
7094/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
7095FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
7096/* Opcode 0xf3 0x0f 0xc5 - invalid */
7097/* Opcode 0xf2 0x0f 0xc5 - invalid */
7098
7099/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
7100FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
7101/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
7102FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
7103/* Opcode 0xf3 0x0f 0xc6 - invalid */
7104/* Opcode 0xf2 0x0f 0xc6 - invalid */
7105
7106
7107/** Opcode 0x0f 0xc7 !11/1. */
7108FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
7109{
7110 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
7111
7112 IEM_MC_BEGIN(4, 3);
7113 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
7114 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
7115 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
7116 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7117 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
7118 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
7119 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7120
7121 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7122 IEMOP_HLP_DONE_DECODING();
7123 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7124
7125 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
7126 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
7127 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
7128
7129 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
7130 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
7131 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
7132
7133 IEM_MC_FETCH_EFLAGS(EFlags);
7134 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7135 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7136 else
7137 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7138
7139 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
7140 IEM_MC_COMMIT_EFLAGS(EFlags);
7141 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7142 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
7143 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
7144 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
7145 IEM_MC_ENDIF();
7146 IEM_MC_ADVANCE_RIP();
7147
7148 IEM_MC_END();
7149 return VINF_SUCCESS;
7150}
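/*
 * For reference, a minimal sketch of the compare-and-exchange logic the
 * assembly helper performs (plain C, non-atomic, illustrative only):
 *     if (*pu64MemDst == u64EaxEdx.u)
 *     {
 *         *pu64MemDst = u64EbxEcx.u;     // ZF=1: store ECX:EBX to memory.
 *         fEFlags |= X86_EFL_ZF;
 *     }
 *     else
 *     {
 *         u64EaxEdx.u = *pu64MemDst;     // ZF=0: load memory into EDX:EAX.
 *         fEFlags &= ~X86_EFL_ZF;
 *     }
 */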
7151
7152
7153/** Opcode REX.W 0x0f 0xc7 !11/1. */
7154FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
7155{
7156 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
7157 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7158 {
7159#if 0
7160 RT_NOREF(bRm);
7161 IEMOP_BITCH_ABOUT_STUB();
7162 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7163#else
7164 IEM_MC_BEGIN(4, 3);
7165 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
7166 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
7167 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
7168 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7169 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
7170 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
7171 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7172
7173 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7174 IEMOP_HLP_DONE_DECODING();
7175 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
7176 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7177
7178 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
7179 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
7180 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
7181
7182 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
7183 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
7184 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
7185
7186 IEM_MC_FETCH_EFLAGS(EFlags);
7187# ifdef RT_ARCH_AMD64
7188 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7189 {
7190 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7191 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7192 else
7193 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7194 }
7195 else
7196# endif
7197 {
7198 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
7199 accesses that are not all atomic, which works fine in a UNI CPU guest
7200 configuration (ignoring DMA). If guest SMP is active we have no choice
7201 but to use a rendezvous callback here. Sigh. */
7202 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
7203 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7204 else
7205 {
7206 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7207 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
7208 }
7209 }
7210
7211 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
7212 IEM_MC_COMMIT_EFLAGS(EFlags);
7213 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7214 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
7215 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
7216 IEM_MC_ENDIF();
7217 IEM_MC_ADVANCE_RIP();
7218
7219 IEM_MC_END();
7220 return VINF_SUCCESS;
7221#endif
7222 }
7223 Log(("cmpxchg16b -> #UD\n"));
7224 return IEMOP_RAISE_INVALID_OPCODE();
7225}
7226
7227
7228/** Opcode 0x0f 0xc7 11/6. */
7229FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
7230
7231/** Opcode 0x0f 0xc7 !11/6. */
7232FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
7233
7234/** Opcode 0x66 0x0f 0xc7 !11/6. */
7235FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
7236
7237/** Opcode 0xf3 0x0f 0xc7 !11/6. */
7238FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
7239
7240/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
7241FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7242
7243
7244/** Opcode 0x0f 0xc7. */
7245FNIEMOP_DEF(iemOp_Grp9)
7246{
7247 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
7248 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7249 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7250 {
7251 case 0: case 2: case 3: case 4: case 5:
7252 return IEMOP_RAISE_INVALID_OPCODE();
7253 case 1:
7254 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
7255 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
7256 || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
7257 return IEMOP_RAISE_INVALID_OPCODE();
7258 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7259 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
7260 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
7261 case 6:
7262 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7263 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
7264 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
7265 {
7266 case 0:
7267 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
7268 case IEM_OP_PRF_SIZE_OP:
7269 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
7270 case IEM_OP_PRF_REPZ:
7271 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
7272 default:
7273 return IEMOP_RAISE_INVALID_OPCODE();
7274 }
7275 case 7:
7276 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
7277 {
7278 case 0:
7279 case IEM_OP_PRF_REPZ:
7280 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
7281 default:
7282 return IEMOP_RAISE_INVALID_OPCODE();
7283 }
7284 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7285 }
7286}
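/*
 * Illustrative decode example for the group above: 0f c7 30 has mod=00,
 * reg=110, rm=000, so with no prefix it selects iemOp_Grp9_vmptrld_Mq,
 * with 066h iemOp_Grp9_vmclear_Mq, and with 0f3h iemOp_Grp9_vmxon_Mq.
 */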
7287
7288
7289/**
7290 * Common 'bswap register' helper.
7291 */
7292FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7293{
7294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7295 switch (pVCpu->iem.s.enmEffOpSize)
7296 {
7297 case IEMMODE_16BIT:
7298 IEM_MC_BEGIN(1, 0);
7299 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7300 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7301 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7302 IEM_MC_ADVANCE_RIP();
7303 IEM_MC_END();
7304 return VINF_SUCCESS;
7305
7306 case IEMMODE_32BIT:
7307 IEM_MC_BEGIN(1, 0);
7308 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7309 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7310 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7311 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7312 IEM_MC_ADVANCE_RIP();
7313 IEM_MC_END();
7314 return VINF_SUCCESS;
7315
7316 case IEMMODE_64BIT:
7317 IEM_MC_BEGIN(1, 0);
7318 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7319 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7320 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7321 IEM_MC_ADVANCE_RIP();
7322 IEM_MC_END();
7323 return VINF_SUCCESS;
7324
7325 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7326 }
7327}
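/*
 * For reference, the 32-bit byte swap performed by iemAImpl_bswap_u32 is
 * equivalent to this plain C expression (illustrative only):
 *     uDst = (uDst >> 24)
 *          | ((uDst >> 8) & UINT32_C(0x0000ff00))
 *          | ((uDst << 8) & UINT32_C(0x00ff0000))
 *          | (uDst << 24);
 */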
7328
7329
7330/** Opcode 0x0f 0xc8. */
7331FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7332{
7333 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7334 /* Note! Intel's manuals state that R8-R15 can be accessed by using a REX.X
7335 prefix, but REX.B appears to be the correct prefix. For a parallel
7336 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
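 /* E.g. on AMD64, 41 0f c8 (REX.B + 0f c8) is expected to decode as bswap r8d. */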
7337 IEMOP_HLP_MIN_486();
7338 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7339}
7340
7341
7342/** Opcode 0x0f 0xc9. */
7343FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7344{
7345 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7346 IEMOP_HLP_MIN_486();
7347 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7348}
7349
7350
7351/** Opcode 0x0f 0xca. */
7352FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7353{
7354 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7355 IEMOP_HLP_MIN_486();
7356 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7357}
7358
7359
7360/** Opcode 0x0f 0xcb. */
7361FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7362{
7363 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7364 IEMOP_HLP_MIN_486();
7365 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7366}
7367
7368
7369/** Opcode 0x0f 0xcc. */
7370FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7371{
7372 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7373 IEMOP_HLP_MIN_486();
7374 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7375}
7376
7377
7378/** Opcode 0x0f 0xcd. */
7379FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7380{
7381 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7382 IEMOP_HLP_MIN_486();
7383 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7384}
7385
7386
7387/** Opcode 0x0f 0xce. */
7388FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7389{
7390 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7391 IEMOP_HLP_MIN_486();
7392 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7393}
7394
7395
7396/** Opcode 0x0f 0xcf. */
7397FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7398{
7399 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7400 IEMOP_HLP_MIN_486();
7401 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7402}
7403
7404
7405/* Opcode 0x0f 0xd0 - invalid */
7406/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
7407FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
7408/* Opcode 0xf3 0x0f 0xd0 - invalid */
7409/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
7410FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
7411
7412/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7413FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7414/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
7415FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
7416/* Opcode 0xf3 0x0f 0xd1 - invalid */
7417/* Opcode 0xf2 0x0f 0xd1 - invalid */
7418
7419/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7420FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7421/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
7422FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
7423/* Opcode 0xf3 0x0f 0xd2 - invalid */
7424/* Opcode 0xf2 0x0f 0xd2 - invalid */
7425
7426/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7427FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7428/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
7429FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
7430/* Opcode 0xf3 0x0f 0xd3 - invalid */
7431/* Opcode 0xf2 0x0f 0xd3 - invalid */
7432
7433/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7434FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7435/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
7436FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
7437/* Opcode 0xf3 0x0f 0xd4 - invalid */
7438/* Opcode 0xf2 0x0f 0xd4 - invalid */
7439
7440/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7441FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7442/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
7443FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
7444/* Opcode 0xf3 0x0f 0xd5 - invalid */
7445/* Opcode 0xf2 0x0f 0xd5 - invalid */
7446
7447/* Opcode 0x0f 0xd6 - invalid */
7448/** Opcode 0x66 0x0f 0xd6 - vmovq Wq, Vq */
7449FNIEMOP_STUB(iemOp_vmovq_Wq_Vq);
7450/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7451FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7452/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7453FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7454#if 0
7455FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7456{
7457 /* Docs says register only. */
7458 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7459
7460 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7461 {
7462 case IEM_OP_PRF_SIZE_OP: /* SSE */
7463 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7464 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7465 IEM_MC_BEGIN(2, 0);
7466 IEM_MC_ARG(uint64_t *, pDst, 0);
7467 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7468 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7469 IEM_MC_PREPARE_SSE_USAGE();
7470 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7471 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7472 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7473 IEM_MC_ADVANCE_RIP();
7474 IEM_MC_END();
7475 return VINF_SUCCESS;
7476
7477 case 0: /* MMX */
7478 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7479 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7480 IEM_MC_BEGIN(2, 0);
7481 IEM_MC_ARG(uint64_t *, pDst, 0);
7482 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7483 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7484 IEM_MC_PREPARE_FPU_USAGE();
7485 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7486 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7487 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7488 IEM_MC_ADVANCE_RIP();
7489 IEM_MC_END();
7490 return VINF_SUCCESS;
7491
7492 default:
7493 return IEMOP_RAISE_INVALID_OPCODE();
7494 }
7495}
7496#endif
7497
7498
7499/** Opcode 0x0f 0xd7. */
7500FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
7501{
7502 /* Docs says register only. */
7503 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7504 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7505 return IEMOP_RAISE_INVALID_OPCODE();
7506
7507 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7508 /** @todo testcase: Check that the instruction implicitly clears the high
7509 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
7510 * and opcode modifications are made to work with the whole width (not
7511 * just 128). */
7512 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7513 {
7514 case IEM_OP_PRF_SIZE_OP: /* SSE */
7515 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7516 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7517 IEM_MC_BEGIN(2, 0);
7518 IEM_MC_ARG(uint64_t *, pDst, 0);
7519 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7520 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7521 IEM_MC_PREPARE_SSE_USAGE();
7522 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7523 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7524 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7525 IEM_MC_ADVANCE_RIP();
7526 IEM_MC_END();
7527 return VINF_SUCCESS;
7528
7529 case 0: /* MMX */
7530 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7531 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7532 IEM_MC_BEGIN(2, 0);
7533 IEM_MC_ARG(uint64_t *, pDst, 0);
7534 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7535 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7536 IEM_MC_PREPARE_FPU_USAGE();
7537 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7538 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7539 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7540 IEM_MC_ADVANCE_RIP();
7541 IEM_MC_END();
7542 return VINF_SUCCESS;
7543
7544 default:
7545 return IEMOP_RAISE_INVALID_OPCODE();
7546 }
7547}
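/*
 * For reference, the 64-bit (MMX) pmovmskb operation gathers the most
 * significant bit of each source byte, roughly (plain C, illustrative only):
 *     uint64_t uDst = 0;
 *     for (unsigned iByte = 0; iByte < 8; iByte++)
 *         uDst |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte;
 */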
7548
7549
7550/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
7551FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
7552/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */
7553FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
7554/* Opcode 0xf3 0x0f 0xd8 - invalid */
7555/* Opcode 0xf2 0x0f 0xd8 - invalid */
7556
7557/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
7558FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
7559/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
7560FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
7561/* Opcode 0xf3 0x0f 0xd9 - invalid */
7562/* Opcode 0xf2 0x0f 0xd9 - invalid */
7563
7564/** Opcode 0x0f 0xda - pminub Pq, Qq */
7565FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
7566/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
7567FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
7568/* Opcode 0xf3 0x0f 0xda - invalid */
7569/* Opcode 0xf2 0x0f 0xda - invalid */
7570
7571/** Opcode 0x0f 0xdb - pand Pq, Qq */
7572FNIEMOP_STUB(iemOp_pand_Pq_Qq);
7573/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */
7574FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
7575/* Opcode 0xf3 0x0f 0xdb - invalid */
7576/* Opcode 0xf2 0x0f 0xdb - invalid */
7577
7578/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
7579FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
7580/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
7581FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
7582/* Opcode 0xf3 0x0f 0xdc - invalid */
7583/* Opcode 0xf2 0x0f 0xdc - invalid */
7584
7585/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
7586FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
7587/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
7588FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
7589/* Opcode 0xf3 0x0f 0xdd - invalid */
7590/* Opcode 0xf2 0x0f 0xdd - invalid */
7591
7592/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
7593FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
7594/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */
7595FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
7596/* Opcode 0xf3 0x0f 0xde - invalid */
7597/* Opcode 0xf2 0x0f 0xde - invalid */
7598
7599/** Opcode 0x0f 0xdf - pandn Pq, Qq */
7600FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
7601/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
7602FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
7603/* Opcode 0xf3 0x0f 0xdf - invalid */
7604/* Opcode 0xf2 0x0f 0xdf - invalid */
7605
7606/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
7607FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
7608/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
7609FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
7610/* Opcode 0xf3 0x0f 0xe0 - invalid */
7611/* Opcode 0xf2 0x0f 0xe0 - invalid */
7612
7613/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
7614FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
7615/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */
7616FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
7617/* Opcode 0xf3 0x0f 0xe1 - invalid */
7618/* Opcode 0xf2 0x0f 0xe1 - invalid */
7619
7620/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
7621FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
7622/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
7623FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
7624/* Opcode 0xf3 0x0f 0xe2 - invalid */
7625/* Opcode 0xf2 0x0f 0xe2 - invalid */
7626
7627/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
7628FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
7629/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
7630FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
7631/* Opcode 0xf3 0x0f 0xe3 - invalid */
7632/* Opcode 0xf2 0x0f 0xe3 - invalid */
7633
7634/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
7635FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
7636/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */
7637FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
7638/* Opcode 0xf3 0x0f 0xe4 - invalid */
7639/* Opcode 0xf2 0x0f 0xe4 - invalid */
7640
7641/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
7642FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
7643/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
7644FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
7645/* Opcode 0xf3 0x0f 0xe5 - invalid */
7646/* Opcode 0xf2 0x0f 0xe5 - invalid */
7647
7648/* Opcode 0x0f 0xe6 - invalid */
7649/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
7650FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
7651/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
7652FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
7653/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
7654FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
7655
7656
7657/** Opcode 0x0f 0xe7. */
7658FNIEMOP_DEF(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq)
7659{
7660 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7661 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7662 {
7663 /*
7664 * Register, memory.
7665 */
7666/** @todo check when the REPNZ/Z bits kick in. Same as lock, probably... */
7667 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7668 {
7669
7670 case IEM_OP_PRF_SIZE_OP: /* SSE */
7671 IEMOP_MNEMONIC(movntdq_Mdq_Vdq, "movntdq Mdq,Vdq");
7672 IEM_MC_BEGIN(0, 2);
7673 IEM_MC_LOCAL(uint128_t, uSrc);
7674 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7675
7676 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7678 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7679 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7680
7681 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7682 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7683
7684 IEM_MC_ADVANCE_RIP();
7685 IEM_MC_END();
7686 break;
7687
7688 case 0: /* MMX */
7689 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7690 IEM_MC_BEGIN(0, 2);
7691 IEM_MC_LOCAL(uint64_t, uSrc);
7692 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7693
7694 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7696 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7697 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7698
7699 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7700 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7701
7702 IEM_MC_ADVANCE_RIP();
7703 IEM_MC_END();
7704 break;
7705
7706 default:
7707 return IEMOP_RAISE_INVALID_OPCODE();
7708 }
7709 }
7710 /* The register, register encoding is invalid. */
7711 else
7712 return IEMOP_RAISE_INVALID_OPCODE();
7713 return VINF_SUCCESS;
7714}
7715
7716
7717/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
7718FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
7719/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, W */
7720FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
7721/* Opcode 0xf3 0x0f 0xe8 - invalid */
7722/* Opcode 0xf2 0x0f 0xe8 - invalid */
7723
7724/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
7725FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
7726/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
7727FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
7728/* Opcode 0xf3 0x0f 0xe9 - invalid */
7729/* Opcode 0xf2 0x0f 0xe9 - invalid */
7730
7731/** Opcode 0x0f 0xea - pminsw Pq, Qq */
7732FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
7733/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
7734FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
7735/* Opcode 0xf3 0x0f 0xea - invalid */
7736/* Opcode 0xf2 0x0f 0xea - invalid */
7737
7738/** Opcode 0x0f 0xeb - por Pq, Qq */
7739FNIEMOP_STUB(iemOp_por_Pq_Qq);
7740/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, W */
7741FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
7742/* Opcode 0xf3 0x0f 0xeb - invalid */
7743/* Opcode 0xf2 0x0f 0xeb - invalid */
7744
7745/** Opcode 0x0f 0xec - paddsb Pq, Qq */
7746FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
7747/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
7748FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
7749/* Opcode 0xf3 0x0f 0xec - invalid */
7750/* Opcode 0xf2 0x0f 0xec - invalid */
7751
7752/** Opcode 0x0f 0xed - paddsw Pq, Qq */
7753FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
7754/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
7755FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
7756/* Opcode 0xf3 0x0f 0xed - invalid */
7757/* Opcode 0xf2 0x0f 0xed - invalid */
7758
7759/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
7760FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
7761/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, W */
7762FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
7763/* Opcode 0xf3 0x0f 0xee - invalid */
7764/* Opcode 0xf2 0x0f 0xee - invalid */
7765
7766
7767/** Opcode 0x0f 0xef. */
7768FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
7769{
7770 IEMOP_MNEMONIC(pxor, "pxor");
7771 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
7772}
7773/* Opcode 0xf3 0x0f 0xef - invalid */
7774/* Opcode 0xf2 0x0f 0xef - invalid */
7775
7776/* Opcode 0x0f 0xf0 - invalid */
7777/* Opcode 0x66 0x0f 0xf0 - invalid */
7778/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
7779FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
7780
7781/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
7782FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
7783/** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, W */
7784FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
7785/* Opcode 0xf2 0x0f 0xf1 - invalid */
7786
7787/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
7788FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
7789/** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */
7790FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
7791/* Opcode 0xf2 0x0f 0xf2 - invalid */
7792
7793/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
7794FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
7795/** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */
7796FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
7797/* Opcode 0xf2 0x0f 0xf3 - invalid */
7798
7799/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
7800FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
7801/** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, W */
7802FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
7803/* Opcode 0xf2 0x0f 0xf4 - invalid */
7804
7805/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
7806FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
7807/** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */
7808FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
7809/* Opcode 0xf2 0x0f 0xf5 - invalid */
7810
7811/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
7812FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
7813/** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */
7814FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
7815/* Opcode 0xf2 0x0f 0xf6 - invalid */
7816
7817/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
7818FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
7819/** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */
7820FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
7821/* Opcode 0xf2 0x0f 0xf7 - invalid */
7822
7823/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
7824FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
7825/** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, W */
7826FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
7827/* Opcode 0xf2 0x0f 0xf8 - invalid */
7828
7829/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
7830FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
7831/** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */
7832FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
7833/* Opcode 0xf2 0x0f 0xf9 - invalid */
7834
7835/** Opcode 0x0f 0xfa - psubd Pq, Qq */
7836FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
7837/** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */
7838FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
7839/* Opcode 0xf2 0x0f 0xfa - invalid */
7840
7841/** Opcode 0x0f 0xfb - psubq Pq, Qq */
7842FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
7843/** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, W */
7844FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
7845/* Opcode 0xf2 0x0f 0xfb - invalid */
7846
7847/** Opcode 0x0f 0xfc - paddb Pq, Qq */
7848FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
7849/** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */
7850FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
7851/* Opcode 0xf2 0x0f 0xfc - invalid */
7852
7853/** Opcode 0x0f 0xfd - paddw Pq, Qq */
7854FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
7855/** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */
7856FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
7857/* Opcode 0xf2 0x0f 0xfd - invalid */
7858
7859/** Opcode 0x0f 0xfe - paddd Pq, Qq */
7860FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
7861/** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, W */
7862FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
7863/* Opcode 0xf2 0x0f 0xfe - invalid */
7864
7865
7866/** Opcode **** 0x0f 0xff - UD0 */
7867FNIEMOP_DEF(iemOp_ud0)
7868{
7869 IEMOP_MNEMONIC(ud0, "ud0");
7870 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
7871 {
7872 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
7873#ifndef TST_IEM_CHECK_MC
7874 RTGCPTR GCPtrEff;
7875 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
7876 if (rcStrict != VINF_SUCCESS)
7877 return rcStrict;
7878#endif
7879 IEMOP_HLP_DONE_DECODING();
7880 }
7881 return IEMOP_RAISE_INVALID_OPCODE();
7882}
7883
7884
7885
7886/** Repeats a_fn four times. For decoding tables. */
7887#define IEMOP_X4(a_fn) a_fn, a_fn, a_fn, a_fn
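/* E.g. IEMOP_X4(iemOp_ud0) fills the no-prefix, 066h, 0f3h and 0f2h columns
   of a table row below with the same decoder function. */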
7888
7889IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
7890{
7891 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
7892 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
7893 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
7894 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
7895 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
7896 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
7897 /* 0x05 */ IEMOP_X4(iemOp_syscall),
7898 /* 0x06 */ IEMOP_X4(iemOp_clts),
7899 /* 0x07 */ IEMOP_X4(iemOp_sysret),
7900 /* 0x08 */ IEMOP_X4(iemOp_invd),
7901 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
7902 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
7903 /* 0x0b */ IEMOP_X4(iemOp_ud2),
7904 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
7905 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
7906 /* 0x0e */ IEMOP_X4(iemOp_femms),
7907 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
7908
7909 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Hx_Wss, iemOp_movsd_Vsd_Hx_Wsd,
7910 /* 0x11 */ IEMOP_X4(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd),
7911 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
7912 /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq, iemOp_movlps_Mq_Vq__movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7913 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7914 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7915 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
7916 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7917 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
7918 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
7919 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
7920 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
7921 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
7922 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
7923 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
7924 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
7925
7926 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
7927 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
7928 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
7929 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
7930 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
7931 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
7932 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
7933 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
7934 /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd, iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7935 /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd, iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM,
7936 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
7937 /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7938 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
7939 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
7940 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7941 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7942
7943 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
7944 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
7945 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
7946 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
7947 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
7948 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
7949 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
7950 /* 0x37 */ IEMOP_X4(iemOp_getsec),
7951 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
7952 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7953 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
7954 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7955 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7956 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
7957 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7958 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
7959
7960 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
7961 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
7962 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
7963 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
7964 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
7965 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
7966 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
7967 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
7968 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
7969 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
7970 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
7971 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
7972 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
7973 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
7974 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
7975 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
7976
7977 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7978 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
7979 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7980 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7981 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7982 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7983 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7984 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7985 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
7986 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
7987 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
7988 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
7989 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
7990 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
7991 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
7992 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
7993
7994 /* 0x60 */ IEMOP_X4(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq),
7995 /* 0x61 */ IEMOP_X4(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq),
7996 /* 0x62 */ IEMOP_X4(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq),
7997 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7998 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7999 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8000 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8001 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8002 /* 0x68 */ IEMOP_X4(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq),
8003 /* 0x69 */ IEMOP_X4(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq),
8004 /* 0x6a */ IEMOP_X4(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq),
8005 /* 0x6b */ IEMOP_X4(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq),
8006 /* 0x6c */ IEMOP_X4(iemOp_punpcklqdq_Vdq_Wdq),
8007 /* 0x6d */ IEMOP_X4(iemOp_punpckhqdq_Vdq_Wdq),
8008 /* 0x6e */ IEMOP_X4(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey),
8009 /* 0x6f */ IEMOP_X4(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq),
8010
8011 /* 0x70 */ IEMOP_X4(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib),
8012 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
8013 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
8014 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
8015 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq, iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8016 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq, iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8017 /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq, iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8018 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8019
8020 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8021 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8022 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8023 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8024 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
8025 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
8026 /* 0x7e */ IEMOP_X4(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq),
8027 /* 0x7f */ IEMOP_X4(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq),
8028
8029 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
8030 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
8031 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
8032 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
8033 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
8034 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
8035 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
8036 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
8037 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
8038 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
8039 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
8040 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
8041 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
8042 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
8043 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
8044 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
8045
8046 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
8047 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
8048 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
8049 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
8050 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
8051 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
8052 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
8053 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
8054 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
8055 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
8056 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
8057 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
8058 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
8059 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
8060 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
8061 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
8062
8063 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
8064 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
8065 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
8066 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
8067 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
8068 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
8069 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
8070 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8071 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
8072 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
8073 /* 0xaa */ IEMOP_X4(iemOp_rsm),
8074 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
8075 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
8076 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
8077 /* 0xae */ IEMOP_X4(iemOp_Grp15),
8078 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
8079
8080 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
8081 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
8082 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
8083 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
8084 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
8085 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
8086 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
8087 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
8088 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
8089 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
8090 /* 0xba */ IEMOP_X4(iemOp_Grp8),
8091 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
8092 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
8093 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
8094 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
8095 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
8096
8097 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
8098 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
8099 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
8100 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8101 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8102 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8103 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,
8104 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
8105 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
8106 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
8107 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
8108 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
8109 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
8110 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
8111 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
8112 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
8113
8114 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
8115 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8116 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8117 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8118 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8119 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8120 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
8121 /* 0xd7 */ IEMOP_X4(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq),
8122 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8123 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8124 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8125 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8126 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8127 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8128 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8129 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8130
8131 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8132 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8133 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8134 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8135 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8136 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8137 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
8138 /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq, iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8139 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8140 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8141 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8142 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8143 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8144 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8145 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8146 /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq, iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8147
8148 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
8149 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8150 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8151 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8152 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8153 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8154 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8155 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8156 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8157 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8158 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8159 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8160 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8161 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8162 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8163 /* 0xff */ IEMOP_X4(iemOp_ud0),
8164};
8165AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
8166/** @} */
8167
8168
8169/** @name One byte opcodes.
8170 *
8171 * @{
8172 */
8173
8174/** Opcode 0x00. */
8175FNIEMOP_DEF(iemOp_add_Eb_Gb)
8176{
8177 IEMOP_MNEMONIC(add_Eb_Gb, "add Eb,Gb");
8178 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
8179}
8180
8181
8182/** Opcode 0x01. */
8183FNIEMOP_DEF(iemOp_add_Ev_Gv)
8184{
8185 IEMOP_MNEMONIC(add_Ev_Gv, "add Ev,Gv");
8186 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
8187}
8188
8189
8190/** Opcode 0x02. */
8191FNIEMOP_DEF(iemOp_add_Gb_Eb)
8192{
8193 IEMOP_MNEMONIC(add_Gb_Eb, "add Gb,Eb");
8194 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
8195}
8196
8197
8198/** Opcode 0x03. */
8199FNIEMOP_DEF(iemOp_add_Gv_Ev)
8200{
8201 IEMOP_MNEMONIC(add_Gv_Ev, "add Gv,Ev");
8202 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
8203}
8204
8205
8206/** Opcode 0x04. */
8207FNIEMOP_DEF(iemOp_add_Al_Ib)
8208{
8209 IEMOP_MNEMONIC(add_al_Ib, "add al,Ib");
8210 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
8211}
8212
8213
8214/** Opcode 0x05. */
8215FNIEMOP_DEF(iemOp_add_eAX_Iz)
8216{
8217 IEMOP_MNEMONIC(add_rAX_Iz, "add rAX,Iz");
8218 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
8219}
8220
8221
8222/** Opcode 0x06. */
8223FNIEMOP_DEF(iemOp_push_ES)
8224{
8225 IEMOP_MNEMONIC(push_es, "push es");
8226 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
8227}
8228
8229
8230/** Opcode 0x07. */
8231FNIEMOP_DEF(iemOp_pop_ES)
8232{
8233 IEMOP_MNEMONIC(pop_es, "pop es");
8234 IEMOP_HLP_NO_64BIT();
8235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8236 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
8237}
8238
8239
8240/** Opcode 0x08. */
8241FNIEMOP_DEF(iemOp_or_Eb_Gb)
8242{
8243 IEMOP_MNEMONIC(or_Eb_Gb, "or Eb,Gb");
8244 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8245 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
8246}
8247
8248
8249/** Opcode 0x09. */
8250FNIEMOP_DEF(iemOp_or_Ev_Gv)
8251{
8252 IEMOP_MNEMONIC(or_Ev_Gv, "or Ev,Gv");
8253 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8254 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
8255}
8256
8257
8258/** Opcode 0x0a. */
8259FNIEMOP_DEF(iemOp_or_Gb_Eb)
8260{
8261 IEMOP_MNEMONIC(or_Gb_Eb, "or Gb,Eb");
8262 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8263 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
8264}
8265
8266
8267/** Opcode 0x0b. */
8268FNIEMOP_DEF(iemOp_or_Gv_Ev)
8269{
8270 IEMOP_MNEMONIC(or_Gv_Ev, "or Gv,Ev");
8271 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8272 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
8273}
8274
8275
8276/** Opcode 0x0c. */
8277FNIEMOP_DEF(iemOp_or_Al_Ib)
8278{
8279 IEMOP_MNEMONIC(or_al_Ib, "or al,Ib");
8280 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8281 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
8282}
8283
8284
8285/** Opcode 0x0d. */
8286FNIEMOP_DEF(iemOp_or_eAX_Iz)
8287{
8288 IEMOP_MNEMONIC(or_rAX_Iz, "or rAX,Iz");
8289 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8290 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
8291}
8292
8293
8294/** Opcode 0x0e. */
8295FNIEMOP_DEF(iemOp_push_CS)
8296{
8297 IEMOP_MNEMONIC(push_cs, "push cs");
8298 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
8299}
8300
8301
8302/** Opcode 0x0f. */
8303FNIEMOP_DEF(iemOp_2byteEscape)
8304{
8305#ifdef VBOX_STRICT
8306 static bool s_fTested = false;
8307 if (RT_LIKELY(s_fTested)) { /* likely */ }
8308 else
8309 {
8310 s_fTested = true;
8311 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
8312 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
8313 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
8314 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
8315 }
8316#endif
8317
8318 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8319
    /** @todo POP CS on 8086, undefined on 80186. */
8321 IEMOP_HLP_MIN_286();
8322 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
8323}
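

/*
 * Illustration (comment only, not compiled): decoding F3 0F BC /r (tzcnt)
 * reaches the dispatch above with b = 0xbc and idxPrefix = 2, selecting
 * g_apfnTwoByteMap[0xbc * 4 + 2], which the strict-build assertions verify
 * to be iemOp_tzcnt_Gv_Ev. The column layout assumed here is inferred from
 * those assertions: 0 = no prefix, 1 = 0x66, 2 = 0xF3, 3 = 0xF2.
 */
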
8324
8325/** Opcode 0x10. */
8326FNIEMOP_DEF(iemOp_adc_Eb_Gb)
8327{
8328 IEMOP_MNEMONIC(adc_Eb_Gb, "adc Eb,Gb");
8329 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
8330}
8331
8332
8333/** Opcode 0x11. */
8334FNIEMOP_DEF(iemOp_adc_Ev_Gv)
8335{
8336 IEMOP_MNEMONIC(adc_Ev_Gv, "adc Ev,Gv");
8337 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
8338}
8339
8340
8341/** Opcode 0x12. */
8342FNIEMOP_DEF(iemOp_adc_Gb_Eb)
8343{
8344 IEMOP_MNEMONIC(adc_Gb_Eb, "adc Gb,Eb");
8345 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
8346}
8347
8348
8349/** Opcode 0x13. */
8350FNIEMOP_DEF(iemOp_adc_Gv_Ev)
8351{
8352 IEMOP_MNEMONIC(adc_Gv_Ev, "adc Gv,Ev");
8353 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
8354}
8355
8356
8357/** Opcode 0x14. */
8358FNIEMOP_DEF(iemOp_adc_Al_Ib)
8359{
8360 IEMOP_MNEMONIC(adc_al_Ib, "adc al,Ib");
8361 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
8362}
8363
8364
8365/** Opcode 0x15. */
8366FNIEMOP_DEF(iemOp_adc_eAX_Iz)
8367{
8368 IEMOP_MNEMONIC(adc_rAX_Iz, "adc rAX,Iz");
8369 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
8370}
8371
8372
8373/** Opcode 0x16. */
8374FNIEMOP_DEF(iemOp_push_SS)
8375{
8376 IEMOP_MNEMONIC(push_ss, "push ss");
8377 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
8378}
8379
8380
8381/** Opcode 0x17. */
8382FNIEMOP_DEF(iemOp_pop_SS)
8383{
8384 IEMOP_MNEMONIC(pop_ss, "pop ss"); /** @todo implies instruction fusing? */
8385 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8386 IEMOP_HLP_NO_64BIT();
8387 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
8388}
8389
8390
8391/** Opcode 0x18. */
8392FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
8393{
8394 IEMOP_MNEMONIC(sbb_Eb_Gb, "sbb Eb,Gb");
8395 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
8396}
8397
8398
8399/** Opcode 0x19. */
8400FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
8401{
8402 IEMOP_MNEMONIC(sbb_Ev_Gv, "sbb Ev,Gv");
8403 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
8404}
8405
8406
8407/** Opcode 0x1a. */
8408FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
8409{
8410 IEMOP_MNEMONIC(sbb_Gb_Eb, "sbb Gb,Eb");
8411 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
8412}
8413
8414
8415/** Opcode 0x1b. */
8416FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
8417{
8418 IEMOP_MNEMONIC(sbb_Gv_Ev, "sbb Gv,Ev");
8419 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
8420}
8421
8422
8423/** Opcode 0x1c. */
8424FNIEMOP_DEF(iemOp_sbb_Al_Ib)
8425{
8426 IEMOP_MNEMONIC(sbb_al_Ib, "sbb al,Ib");
8427 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
8428}
8429
8430
8431/** Opcode 0x1d. */
8432FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
8433{
8434 IEMOP_MNEMONIC(sbb_rAX_Iz, "sbb rAX,Iz");
8435 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
8436}
8437
8438
8439/** Opcode 0x1e. */
8440FNIEMOP_DEF(iemOp_push_DS)
8441{
8442 IEMOP_MNEMONIC(push_ds, "push ds");
8443 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
8444}
8445
8446
8447/** Opcode 0x1f. */
8448FNIEMOP_DEF(iemOp_pop_DS)
8449{
8450 IEMOP_MNEMONIC(pop_ds, "pop ds");
8451 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8452 IEMOP_HLP_NO_64BIT();
8453 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
8454}
8455
8456
8457/** Opcode 0x20. */
8458FNIEMOP_DEF(iemOp_and_Eb_Gb)
8459{
8460 IEMOP_MNEMONIC(and_Eb_Gb, "and Eb,Gb");
8461 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8462 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
8463}
8464
8465
8466/** Opcode 0x21. */
8467FNIEMOP_DEF(iemOp_and_Ev_Gv)
8468{
8469 IEMOP_MNEMONIC(and_Ev_Gv, "and Ev,Gv");
8470 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8471 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
8472}
8473
8474
8475/** Opcode 0x22. */
8476FNIEMOP_DEF(iemOp_and_Gb_Eb)
8477{
8478 IEMOP_MNEMONIC(and_Gb_Eb, "and Gb,Eb");
8479 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8480 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
8481}
8482
8483
8484/** Opcode 0x23. */
8485FNIEMOP_DEF(iemOp_and_Gv_Ev)
8486{
8487 IEMOP_MNEMONIC(and_Gv_Ev, "and Gv,Ev");
8488 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8489 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
8490}
8491
8492
8493/** Opcode 0x24. */
8494FNIEMOP_DEF(iemOp_and_Al_Ib)
8495{
8496 IEMOP_MNEMONIC(and_al_Ib, "and al,Ib");
8497 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8498 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
8499}
8500
8501
8502/** Opcode 0x25. */
8503FNIEMOP_DEF(iemOp_and_eAX_Iz)
8504{
8505 IEMOP_MNEMONIC(and_rAX_Iz, "and rAX,Iz");
8506 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8507 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
8508}
8509
8510
8511/** Opcode 0x26. */
8512FNIEMOP_DEF(iemOp_seg_ES)
8513{
8514 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
8515 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
8516 pVCpu->iem.s.iEffSeg = X86_SREG_ES;
8517
8518 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8519 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8520}
8521
8522
8523/** Opcode 0x27. */
8524FNIEMOP_DEF(iemOp_daa)
8525{
8526 IEMOP_MNEMONIC(daa_AL, "daa AL");
8527 IEMOP_HLP_NO_64BIT();
8528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8529 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8530 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
8531}
8532
8533
8534/** Opcode 0x28. */
8535FNIEMOP_DEF(iemOp_sub_Eb_Gb)
8536{
8537 IEMOP_MNEMONIC(sub_Eb_Gb, "sub Eb,Gb");
8538 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
8539}
8540
8541
8542/** Opcode 0x29. */
8543FNIEMOP_DEF(iemOp_sub_Ev_Gv)
8544{
8545 IEMOP_MNEMONIC(sub_Ev_Gv, "sub Ev,Gv");
8546 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
8547}
8548
8549
8550/** Opcode 0x2a. */
8551FNIEMOP_DEF(iemOp_sub_Gb_Eb)
8552{
8553 IEMOP_MNEMONIC(sub_Gb_Eb, "sub Gb,Eb");
8554 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
8555}
8556
8557
8558/** Opcode 0x2b. */
8559FNIEMOP_DEF(iemOp_sub_Gv_Ev)
8560{
8561 IEMOP_MNEMONIC(sub_Gv_Ev, "sub Gv,Ev");
8562 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
8563}
8564
8565
8566/** Opcode 0x2c. */
8567FNIEMOP_DEF(iemOp_sub_Al_Ib)
8568{
8569 IEMOP_MNEMONIC(sub_al_Ib, "sub al,Ib");
8570 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
8571}
8572
8573
8574/** Opcode 0x2d. */
8575FNIEMOP_DEF(iemOp_sub_eAX_Iz)
8576{
8577 IEMOP_MNEMONIC(sub_rAX_Iz, "sub rAX,Iz");
8578 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
8579}
8580
8581
8582/** Opcode 0x2e. */
8583FNIEMOP_DEF(iemOp_seg_CS)
8584{
8585 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
8586 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
8587 pVCpu->iem.s.iEffSeg = X86_SREG_CS;
8588
8589 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8590 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8591}
8592
8593
8594/** Opcode 0x2f. */
8595FNIEMOP_DEF(iemOp_das)
8596{
8597 IEMOP_MNEMONIC(das_AL, "das AL");
8598 IEMOP_HLP_NO_64BIT();
8599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8600 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8601 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
8602}
8603
8604
8605/** Opcode 0x30. */
8606FNIEMOP_DEF(iemOp_xor_Eb_Gb)
8607{
8608 IEMOP_MNEMONIC(xor_Eb_Gb, "xor Eb,Gb");
8609 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8610 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
8611}
8612
8613
8614/** Opcode 0x31. */
8615FNIEMOP_DEF(iemOp_xor_Ev_Gv)
8616{
8617 IEMOP_MNEMONIC(xor_Ev_Gv, "xor Ev,Gv");
8618 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8619 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
8620}
8621
8622
8623/** Opcode 0x32. */
8624FNIEMOP_DEF(iemOp_xor_Gb_Eb)
8625{
8626 IEMOP_MNEMONIC(xor_Gb_Eb, "xor Gb,Eb");
8627 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8628 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
8629}
8630
8631
8632/** Opcode 0x33. */
8633FNIEMOP_DEF(iemOp_xor_Gv_Ev)
8634{
8635 IEMOP_MNEMONIC(xor_Gv_Ev, "xor Gv,Ev");
8636 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8637 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
8638}
8639
8640
8641/** Opcode 0x34. */
8642FNIEMOP_DEF(iemOp_xor_Al_Ib)
8643{
8644 IEMOP_MNEMONIC(xor_al_Ib, "xor al,Ib");
8645 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8646 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
8647}
8648
8649
8650/** Opcode 0x35. */
8651FNIEMOP_DEF(iemOp_xor_eAX_Iz)
8652{
8653 IEMOP_MNEMONIC(xor_rAX_Iz, "xor rAX,Iz");
8654 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8655 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
8656}
8657
8658
8659/** Opcode 0x36. */
8660FNIEMOP_DEF(iemOp_seg_SS)
8661{
8662 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
8663 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
8664 pVCpu->iem.s.iEffSeg = X86_SREG_SS;
8665
8666 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8667 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8668}
8669
8670
8671/** Opcode 0x37. */
8672FNIEMOP_STUB(iemOp_aaa);
8673
8674
8675/** Opcode 0x38. */
8676FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
8677{
8678 IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
8679 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
8680}
8681
8682
8683/** Opcode 0x39. */
8684FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
8685{
8686 IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
8687 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
8688}
8689
8690
8691/** Opcode 0x3a. */
8692FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
8693{
8694 IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
8695 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
8696}
8697
8698
8699/** Opcode 0x3b. */
8700FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
8701{
8702 IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
8703 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
8704}
8705
8706
8707/** Opcode 0x3c. */
8708FNIEMOP_DEF(iemOp_cmp_Al_Ib)
8709{
8710 IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
8711 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
8712}
8713
8714
8715/** Opcode 0x3d. */
8716FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
8717{
8718 IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
8719 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
8720}
8721
8722
8723/** Opcode 0x3e. */
8724FNIEMOP_DEF(iemOp_seg_DS)
8725{
8726 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
8727 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
8728 pVCpu->iem.s.iEffSeg = X86_SREG_DS;
8729
8730 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8731 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8732}
8733
8734
8735/** Opcode 0x3f. */
8736FNIEMOP_STUB(iemOp_aas);
8737
8738/**
8739 * Common 'inc/dec/not/neg register' helper.
8740 */
8741FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
8742{
8743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8744 switch (pVCpu->iem.s.enmEffOpSize)
8745 {
8746 case IEMMODE_16BIT:
8747 IEM_MC_BEGIN(2, 0);
8748 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8749 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8750 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
8751 IEM_MC_REF_EFLAGS(pEFlags);
8752 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
8753 IEM_MC_ADVANCE_RIP();
8754 IEM_MC_END();
8755 return VINF_SUCCESS;
8756
8757 case IEMMODE_32BIT:
8758 IEM_MC_BEGIN(2, 0);
8759 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8760 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8761 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8762 IEM_MC_REF_EFLAGS(pEFlags);
8763 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
8764 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8765 IEM_MC_ADVANCE_RIP();
8766 IEM_MC_END();
8767 return VINF_SUCCESS;
8768
8769 case IEMMODE_64BIT:
8770 IEM_MC_BEGIN(2, 0);
8771 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8772 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8773 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8774 IEM_MC_REF_EFLAGS(pEFlags);
8775 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
8776 IEM_MC_ADVANCE_RIP();
8777 IEM_MC_END();
8778 return VINF_SUCCESS;
8779 }
8780 return VINF_SUCCESS;
8781}
8782
8783
8784/** Opcode 0x40. */
8785FNIEMOP_DEF(iemOp_inc_eAX)
8786{
8787 /*
8788 * This is a REX prefix in 64-bit mode.
8789 */
8790 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8791 {
8792 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
8793 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;
8794
8795 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8796 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8797 }
8798
8799 IEMOP_MNEMONIC(inc_eAX, "inc eAX");
8800 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
8801}
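

/*
 * Note (informational): in 64-bit mode the bytes 0x40-0x4f act as REX
 * prefixes instead of inc/dec, as the branch above shows. For example,
 * 48 FF C0 encodes inc rax (FF /0 with REX.W), whereas a lone 0x40 in
 * 32-bit code is still inc eAX.
 */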
8802
8803
8804/** Opcode 0x41. */
8805FNIEMOP_DEF(iemOp_inc_eCX)
8806{
8807 /*
8808 * This is a REX prefix in 64-bit mode.
8809 */
8810 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8811 {
8812 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
8813 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
8814 pVCpu->iem.s.uRexB = 1 << 3;
8815
8816 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8817 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8818 }
8819
8820 IEMOP_MNEMONIC(inc_eCX, "inc eCX");
8821 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
8822}
8823
8824
8825/** Opcode 0x42. */
8826FNIEMOP_DEF(iemOp_inc_eDX)
8827{
8828 /*
8829 * This is a REX prefix in 64-bit mode.
8830 */
8831 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8832 {
8833 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
8834 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
8835 pVCpu->iem.s.uRexIndex = 1 << 3;
8836
8837 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8838 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8839 }
8840
8841 IEMOP_MNEMONIC(inc_eDX, "inc eDX");
8842 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
8843}
8844
8845
8847/** Opcode 0x43. */
8848FNIEMOP_DEF(iemOp_inc_eBX)
8849{
8850 /*
8851 * This is a REX prefix in 64-bit mode.
8852 */
8853 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8854 {
8855 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
8856 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
8857 pVCpu->iem.s.uRexB = 1 << 3;
8858 pVCpu->iem.s.uRexIndex = 1 << 3;
8859
8860 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8861 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8862 }
8863
8864 IEMOP_MNEMONIC(inc_eBX, "inc eBX");
8865 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
8866}
8867
8868
8869/** Opcode 0x44. */
8870FNIEMOP_DEF(iemOp_inc_eSP)
8871{
8872 /*
8873 * This is a REX prefix in 64-bit mode.
8874 */
8875 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8876 {
8877 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
8878 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
8879 pVCpu->iem.s.uRexReg = 1 << 3;
8880
8881 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8882 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8883 }
8884
8885 IEMOP_MNEMONIC(inc_eSP, "inc eSP");
8886 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
8887}
8888
8889
8890/** Opcode 0x45. */
8891FNIEMOP_DEF(iemOp_inc_eBP)
8892{
8893 /*
8894 * This is a REX prefix in 64-bit mode.
8895 */
8896 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8897 {
8898 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
8899 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
8900 pVCpu->iem.s.uRexReg = 1 << 3;
8901 pVCpu->iem.s.uRexB = 1 << 3;
8902
8903 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8904 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8905 }
8906
8907 IEMOP_MNEMONIC(inc_eBP, "inc eBP");
8908 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
8909}
8910
8911
8912/** Opcode 0x46. */
8913FNIEMOP_DEF(iemOp_inc_eSI)
8914{
8915 /*
8916 * This is a REX prefix in 64-bit mode.
8917 */
8918 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8919 {
8920 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
8921 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
8922 pVCpu->iem.s.uRexReg = 1 << 3;
8923 pVCpu->iem.s.uRexIndex = 1 << 3;
8924
8925 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8926 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8927 }
8928
8929 IEMOP_MNEMONIC(inc_eSI, "inc eSI");
8930 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
8931}
8932
8933
8934/** Opcode 0x47. */
8935FNIEMOP_DEF(iemOp_inc_eDI)
8936{
8937 /*
8938 * This is a REX prefix in 64-bit mode.
8939 */
8940 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8941 {
8942 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
8943 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
8944 pVCpu->iem.s.uRexReg = 1 << 3;
8945 pVCpu->iem.s.uRexB = 1 << 3;
8946 pVCpu->iem.s.uRexIndex = 1 << 3;
8947
8948 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8949 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8950 }
8951
8952 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
8953 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
8954}
8955
8956
8957/** Opcode 0x48. */
8958FNIEMOP_DEF(iemOp_dec_eAX)
8959{
8960 /*
8961 * This is a REX prefix in 64-bit mode.
8962 */
8963 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8964 {
8965 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
8966 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
8967 iemRecalEffOpSize(pVCpu);
8968
8969 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8970 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8971 }
8972
8973 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
8974 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
8975}
8976
8977
8978/** Opcode 0x49. */
8979FNIEMOP_DEF(iemOp_dec_eCX)
8980{
8981 /*
8982 * This is a REX prefix in 64-bit mode.
8983 */
8984 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8985 {
8986 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
8987 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
8988 pVCpu->iem.s.uRexB = 1 << 3;
8989 iemRecalEffOpSize(pVCpu);
8990
8991 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8992 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8993 }
8994
8995 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
8996 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
8997}
8998
8999
9000/** Opcode 0x4a. */
9001FNIEMOP_DEF(iemOp_dec_eDX)
9002{
9003 /*
9004 * This is a REX prefix in 64-bit mode.
9005 */
9006 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9007 {
9008 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
9009 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9010 pVCpu->iem.s.uRexIndex = 1 << 3;
9011 iemRecalEffOpSize(pVCpu);
9012
9013 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9014 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9015 }
9016
9017 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
9018 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
9019}
9020
9021
9022/** Opcode 0x4b. */
9023FNIEMOP_DEF(iemOp_dec_eBX)
9024{
9025 /*
9026 * This is a REX prefix in 64-bit mode.
9027 */
9028 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9029 {
9030 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
9031 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9032 pVCpu->iem.s.uRexB = 1 << 3;
9033 pVCpu->iem.s.uRexIndex = 1 << 3;
9034 iemRecalEffOpSize(pVCpu);
9035
9036 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9037 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9038 }
9039
9040 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
9041 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
9042}
9043
9044
9045/** Opcode 0x4c. */
9046FNIEMOP_DEF(iemOp_dec_eSP)
9047{
9048 /*
9049 * This is a REX prefix in 64-bit mode.
9050 */
9051 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9052 {
9053 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
9054 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
9055 pVCpu->iem.s.uRexReg = 1 << 3;
9056 iemRecalEffOpSize(pVCpu);
9057
9058 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9059 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9060 }
9061
9062 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
9063 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
9064}
9065
9066
9067/** Opcode 0x4d. */
9068FNIEMOP_DEF(iemOp_dec_eBP)
9069{
9070 /*
9071 * This is a REX prefix in 64-bit mode.
9072 */
9073 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9074 {
9075 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
9076 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
9077 pVCpu->iem.s.uRexReg = 1 << 3;
9078 pVCpu->iem.s.uRexB = 1 << 3;
9079 iemRecalEffOpSize(pVCpu);
9080
9081 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9082 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9083 }
9084
9085 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
9086 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
9087}
9088
9089
9090/** Opcode 0x4e. */
9091FNIEMOP_DEF(iemOp_dec_eSI)
9092{
9093 /*
9094 * This is a REX prefix in 64-bit mode.
9095 */
9096 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9097 {
9098 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
9099 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9100 pVCpu->iem.s.uRexReg = 1 << 3;
9101 pVCpu->iem.s.uRexIndex = 1 << 3;
9102 iemRecalEffOpSize(pVCpu);
9103
9104 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9105 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9106 }
9107
9108 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
9109 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
9110}
9111
9112
9113/** Opcode 0x4f. */
9114FNIEMOP_DEF(iemOp_dec_eDI)
9115{
9116 /*
9117 * This is a REX prefix in 64-bit mode.
9118 */
9119 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9120 {
9121 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
9122 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9123 pVCpu->iem.s.uRexReg = 1 << 3;
9124 pVCpu->iem.s.uRexB = 1 << 3;
9125 pVCpu->iem.s.uRexIndex = 1 << 3;
9126 iemRecalEffOpSize(pVCpu);
9127
9128 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9129 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9130 }
9131
9132 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
9133 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
9134}
9135
9136
9137/**
9138 * Common 'push register' helper.
9139 */
9140FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
9141{
9142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9143 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9144 {
9145 iReg |= pVCpu->iem.s.uRexB;
9146 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
9147 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
9148 }
9149
9150 switch (pVCpu->iem.s.enmEffOpSize)
9151 {
9152 case IEMMODE_16BIT:
9153 IEM_MC_BEGIN(0, 1);
9154 IEM_MC_LOCAL(uint16_t, u16Value);
9155 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
9156 IEM_MC_PUSH_U16(u16Value);
9157 IEM_MC_ADVANCE_RIP();
9158 IEM_MC_END();
9159 break;
9160
9161 case IEMMODE_32BIT:
9162 IEM_MC_BEGIN(0, 1);
9163 IEM_MC_LOCAL(uint32_t, u32Value);
9164 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
9165 IEM_MC_PUSH_U32(u32Value);
9166 IEM_MC_ADVANCE_RIP();
9167 IEM_MC_END();
9168 break;
9169
9170 case IEMMODE_64BIT:
9171 IEM_MC_BEGIN(0, 1);
9172 IEM_MC_LOCAL(uint64_t, u64Value);
9173 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
9174 IEM_MC_PUSH_U64(u64Value);
9175 IEM_MC_ADVANCE_RIP();
9176 IEM_MC_END();
9177 break;
9178 }
9179
9180 return VINF_SUCCESS;
9181}
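

/*
 * Note (sketch of the semantics, not compiled): the helper above forces a
 * 64-bit default operand size in long mode, so a plain 50h (push rAX)
 * pushes 8 bytes while 66 50 pushes 2; a 32-bit push is not encodable in
 * 64-bit mode.
 */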
9182
9183
9184/** Opcode 0x50. */
9185FNIEMOP_DEF(iemOp_push_eAX)
9186{
9187 IEMOP_MNEMONIC(push_rAX, "push rAX");
9188 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
9189}
9190
9191
9192/** Opcode 0x51. */
9193FNIEMOP_DEF(iemOp_push_eCX)
9194{
9195 IEMOP_MNEMONIC(push_rCX, "push rCX");
9196 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
9197}
9198
9199
9200/** Opcode 0x52. */
9201FNIEMOP_DEF(iemOp_push_eDX)
9202{
9203 IEMOP_MNEMONIC(push_rDX, "push rDX");
9204 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
9205}
9206
9207
9208/** Opcode 0x53. */
9209FNIEMOP_DEF(iemOp_push_eBX)
9210{
9211 IEMOP_MNEMONIC(push_rBX, "push rBX");
9212 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
9213}
9214
9215
9216/** Opcode 0x54. */
9217FNIEMOP_DEF(iemOp_push_eSP)
9218{
9219 IEMOP_MNEMONIC(push_rSP, "push rSP");
9220 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
9221 {
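        /* The 8086/8088 pushes the value of SP *after* the decrement, unlike
           286+ CPUs which push the original value; the SUB below mimics that. */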
9222 IEM_MC_BEGIN(0, 1);
9223 IEM_MC_LOCAL(uint16_t, u16Value);
9224 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
9225 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
9226 IEM_MC_PUSH_U16(u16Value);
9227 IEM_MC_ADVANCE_RIP();
9228 IEM_MC_END();
9229 }
9230 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
9231}
9232
9233
9234/** Opcode 0x55. */
9235FNIEMOP_DEF(iemOp_push_eBP)
9236{
9237 IEMOP_MNEMONIC(push_rBP, "push rBP");
9238 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
9239}
9240
9241
9242/** Opcode 0x56. */
9243FNIEMOP_DEF(iemOp_push_eSI)
9244{
9245 IEMOP_MNEMONIC(push_rSI, "push rSI");
9246 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
9247}
9248
9249
9250/** Opcode 0x57. */
9251FNIEMOP_DEF(iemOp_push_eDI)
9252{
9253 IEMOP_MNEMONIC(push_rDI, "push rDI");
9254 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
9255}
9256
9257
9258/**
9259 * Common 'pop register' helper.
9260 */
9261FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
9262{
9263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9264 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9265 {
9266 iReg |= pVCpu->iem.s.uRexB;
9267 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
9268 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
9269 }
9270
9271 switch (pVCpu->iem.s.enmEffOpSize)
9272 {
9273 case IEMMODE_16BIT:
9274 IEM_MC_BEGIN(0, 1);
9275 IEM_MC_LOCAL(uint16_t *, pu16Dst);
9276 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
9277 IEM_MC_POP_U16(pu16Dst);
9278 IEM_MC_ADVANCE_RIP();
9279 IEM_MC_END();
9280 break;
9281
9282 case IEMMODE_32BIT:
9283 IEM_MC_BEGIN(0, 1);
9284 IEM_MC_LOCAL(uint32_t *, pu32Dst);
9285 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
9286 IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase. */
9288 IEM_MC_ADVANCE_RIP();
9289 IEM_MC_END();
9290 break;
9291
9292 case IEMMODE_64BIT:
9293 IEM_MC_BEGIN(0, 1);
9294 IEM_MC_LOCAL(uint64_t *, pu64Dst);
9295 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
9296 IEM_MC_POP_U64(pu64Dst);
9297 IEM_MC_ADVANCE_RIP();
9298 IEM_MC_END();
9299 break;
9300 }
9301
9302 return VINF_SUCCESS;
9303}
9304
9305
9306/** Opcode 0x58. */
9307FNIEMOP_DEF(iemOp_pop_eAX)
9308{
9309 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
9310 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
9311}
9312
9313
9314/** Opcode 0x59. */
9315FNIEMOP_DEF(iemOp_pop_eCX)
9316{
9317 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
9318 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
9319}
9320
9321
9322/** Opcode 0x5a. */
9323FNIEMOP_DEF(iemOp_pop_eDX)
9324{
9325 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
9326 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
9327}
9328
9329
9330/** Opcode 0x5b. */
9331FNIEMOP_DEF(iemOp_pop_eBX)
9332{
9333 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
9334 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
9335}
9336
9337
9338/** Opcode 0x5c. */
9339FNIEMOP_DEF(iemOp_pop_eSP)
9340{
9341 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
9342 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9343 {
9344 if (pVCpu->iem.s.uRexB)
9345 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
9346 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
9347 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
9348 }
9349
9350 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
9351 DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
9352 /** @todo add testcase for this instruction. */
9353 switch (pVCpu->iem.s.enmEffOpSize)
9354 {
9355 case IEMMODE_16BIT:
9356 IEM_MC_BEGIN(0, 1);
9357 IEM_MC_LOCAL(uint16_t, u16Dst);
9358 IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
9359 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
9360 IEM_MC_ADVANCE_RIP();
9361 IEM_MC_END();
9362 break;
9363
9364 case IEMMODE_32BIT:
9365 IEM_MC_BEGIN(0, 1);
9366 IEM_MC_LOCAL(uint32_t, u32Dst);
9367 IEM_MC_POP_U32(&u32Dst);
9368 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
9369 IEM_MC_ADVANCE_RIP();
9370 IEM_MC_END();
9371 break;
9372
9373 case IEMMODE_64BIT:
9374 IEM_MC_BEGIN(0, 1);
9375 IEM_MC_LOCAL(uint64_t, u64Dst);
9376 IEM_MC_POP_U64(&u64Dst);
9377 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
9378 IEM_MC_ADVANCE_RIP();
9379 IEM_MC_END();
9380 break;
9381 }
9382
9383 return VINF_SUCCESS;
9384}
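

/*
 * Note (informational): for pop rSP the value stored is the one read from
 * the old top of stack, so the stack-pointer increment performed by the pop
 * is effectively discarded by the store - which is why the code above pops
 * into a local and stores it explicitly instead of using
 * iemOpCommonPopGReg.
 */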
9385
9386
9387/** Opcode 0x5d. */
9388FNIEMOP_DEF(iemOp_pop_eBP)
9389{
9390 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
9391 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
9392}
9393
9394
9395/** Opcode 0x5e. */
9396FNIEMOP_DEF(iemOp_pop_eSI)
9397{
9398 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
9399 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
9400}
9401
9402
9403/** Opcode 0x5f. */
9404FNIEMOP_DEF(iemOp_pop_eDI)
9405{
9406 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
9407 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
9408}
9409
9410
9411/** Opcode 0x60. */
9412FNIEMOP_DEF(iemOp_pusha)
9413{
9414 IEMOP_MNEMONIC(pusha, "pusha");
9415 IEMOP_HLP_MIN_186();
9416 IEMOP_HLP_NO_64BIT();
9417 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
9418 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
9419 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
9420 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
9421}
9422
9423
9424/** Opcode 0x61. */
9425FNIEMOP_DEF(iemOp_popa)
9426{
9427 IEMOP_MNEMONIC(popa, "popa");
9428 IEMOP_HLP_MIN_186();
9429 IEMOP_HLP_NO_64BIT();
9430 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
9431 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
9432 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
9433 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
9434}
9435
9436
9437/** Opcode 0x62. */
9438FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
9439// IEMOP_HLP_MIN_186();
9440
9441
9442/** Opcode 0x63 - non-64-bit modes. */
9443FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
9444{
9445 IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
9446 IEMOP_HLP_MIN_286();
9447 IEMOP_HLP_NO_REAL_OR_V86_MODE();
9448 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9449
9450 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9451 {
9452 /* Register */
9453 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
9454 IEM_MC_BEGIN(3, 0);
9455 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9456 IEM_MC_ARG(uint16_t, u16Src, 1);
9457 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9458
9459 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9460 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
9461 IEM_MC_REF_EFLAGS(pEFlags);
9462 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
9463
9464 IEM_MC_ADVANCE_RIP();
9465 IEM_MC_END();
9466 }
9467 else
9468 {
9469 /* Memory */
9470 IEM_MC_BEGIN(3, 2);
9471 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9472 IEM_MC_ARG(uint16_t, u16Src, 1);
9473 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9474 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9475
9476 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9477 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
9478 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9479 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9480 IEM_MC_FETCH_EFLAGS(EFlags);
9481 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
9482
9483 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9484 IEM_MC_COMMIT_EFLAGS(EFlags);
9485 IEM_MC_ADVANCE_RIP();
9486 IEM_MC_END();
9487 }
    return VINF_SUCCESS;
}
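

/*
 * Recap (informational, see iemAImpl_arpl for the real implementation): ARPL
 * compares the RPL bits (bits 0-1) of the two selectors; if the destination
 * RPL is lower it is raised to the source RPL and ZF is set, otherwise ZF
 * is cleared.
 */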
9491
9492
9493/** Opcode 0x63.
9494 * @note This is a weird one. It works like a regular move instruction if
9495 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
9496 * @todo This definitely needs a testcase to verify the odd cases. */
9497FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
9498{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */
9500
9501 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
9502 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9503
9504 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9505 {
9506 /*
9507 * Register to register.
9508 */
9509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9510 IEM_MC_BEGIN(0, 1);
9511 IEM_MC_LOCAL(uint64_t, u64Value);
9512 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9513 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
9514 IEM_MC_ADVANCE_RIP();
9515 IEM_MC_END();
9516 }
9517 else
9518 {
9519 /*
9520 * We're loading a register from memory.
9521 */
9522 IEM_MC_BEGIN(0, 2);
9523 IEM_MC_LOCAL(uint64_t, u64Value);
9524 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9525 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9527 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9528 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
9529 IEM_MC_ADVANCE_RIP();
9530 IEM_MC_END();
9531 }
9532 return VINF_SUCCESS;
9533}
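

/*
 * Illustration (comment only): with REX.W set, 48 63 C1 is movsxd rax, ecx
 * and sign-extends ECX into RAX; without REX.W the 63 /r encoding behaves
 * like a plain 32-bit mov, per the AMD note above.
 */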
9534
9535
9536/** Opcode 0x64. */
9537FNIEMOP_DEF(iemOp_seg_FS)
9538{
9539 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
9540 IEMOP_HLP_MIN_386();
9541
9542 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
9543 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
9544
9545 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9546 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9547}
9548
9549
9550/** Opcode 0x65. */
9551FNIEMOP_DEF(iemOp_seg_GS)
9552{
9553 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
9554 IEMOP_HLP_MIN_386();
9555
9556 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
9557 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
9558
9559 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9560 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9561}
9562
9563
9564/** Opcode 0x66. */
9565FNIEMOP_DEF(iemOp_op_size)
9566{
9567 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
9568 IEMOP_HLP_MIN_386();
9569
9570 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
9571 iemRecalEffOpSize(pVCpu);
9572
    /* For the 4 entry opcode tables, the operand size prefix doesn't count
       when REPZ or REPNZ are present. */
9575 if (pVCpu->iem.s.idxPrefix == 0)
9576 pVCpu->iem.s.idxPrefix = 1;
9577
9578 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9579 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9580}
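

/*
 * Example (comment only, assuming the repz/repnz handlers elsewhere in this
 * file set idxPrefix unconditionally): given 66 F3 0F xx, the check above
 * keeps 0x66 from overwriting idxPrefix, so the repz column of the 4-entry
 * tables is used regardless of which prefix came first.
 */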
9581
9582
9583/** Opcode 0x67. */
9584FNIEMOP_DEF(iemOp_addr_size)
9585{
9586 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
9587 IEMOP_HLP_MIN_386();
9588
9589 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
9590 switch (pVCpu->iem.s.enmDefAddrMode)
9591 {
9592 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
9593 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
9594 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
9595 default: AssertFailed();
9596 }
9597
9598 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9599 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9600}
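

/*
 * Illustration (comment only): in 32-bit code, 67 8B 04 decodes as
 * mov eax, [si] - the 0x67 prefix flips the effective address size to
 * 16-bit per the table above, while the operand size stays 32-bit.
 */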
9601
9602
9603/** Opcode 0x68. */
9604FNIEMOP_DEF(iemOp_push_Iz)
9605{
9606 IEMOP_MNEMONIC(push_Iz, "push Iz");
9607 IEMOP_HLP_MIN_186();
9608 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9609 switch (pVCpu->iem.s.enmEffOpSize)
9610 {
9611 case IEMMODE_16BIT:
9612 {
9613 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9615 IEM_MC_BEGIN(0,0);
9616 IEM_MC_PUSH_U16(u16Imm);
9617 IEM_MC_ADVANCE_RIP();
9618 IEM_MC_END();
9619 return VINF_SUCCESS;
9620 }
9621
9622 case IEMMODE_32BIT:
9623 {
9624 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9626 IEM_MC_BEGIN(0,0);
9627 IEM_MC_PUSH_U32(u32Imm);
9628 IEM_MC_ADVANCE_RIP();
9629 IEM_MC_END();
9630 return VINF_SUCCESS;
9631 }
9632
9633 case IEMMODE_64BIT:
9634 {
9635 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9636 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9637 IEM_MC_BEGIN(0,0);
9638 IEM_MC_PUSH_U64(u64Imm);
9639 IEM_MC_ADVANCE_RIP();
9640 IEM_MC_END();
9641 return VINF_SUCCESS;
9642 }
9643
9644 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9645 }
9646}
9647
9648
9649/** Opcode 0x69. */
9650FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
9651{
9652 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
9653 IEMOP_HLP_MIN_186();
9654 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9655 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
9656
9657 switch (pVCpu->iem.s.enmEffOpSize)
9658 {
9659 case IEMMODE_16BIT:
9660 {
9661 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9662 {
9663 /* register operand */
9664 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9666
9667 IEM_MC_BEGIN(3, 1);
9668 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9669 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
9670 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9671 IEM_MC_LOCAL(uint16_t, u16Tmp);
9672
9673 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9674 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9675 IEM_MC_REF_EFLAGS(pEFlags);
9676 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9677 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9678
9679 IEM_MC_ADVANCE_RIP();
9680 IEM_MC_END();
9681 }
9682 else
9683 {
9684 /* memory operand */
9685 IEM_MC_BEGIN(3, 2);
9686 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9687 IEM_MC_ARG(uint16_t, u16Src, 1);
9688 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9689 IEM_MC_LOCAL(uint16_t, u16Tmp);
9690 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9691
9692 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
9693 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9694 IEM_MC_ASSIGN(u16Src, u16Imm);
9695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9696 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9697 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9698 IEM_MC_REF_EFLAGS(pEFlags);
9699 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9700 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9701
9702 IEM_MC_ADVANCE_RIP();
9703 IEM_MC_END();
9704 }
9705 return VINF_SUCCESS;
9706 }
9707
9708 case IEMMODE_32BIT:
9709 {
9710 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9711 {
9712 /* register operand */
9713 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9715
9716 IEM_MC_BEGIN(3, 1);
9717 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9718 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
9719 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9720 IEM_MC_LOCAL(uint32_t, u32Tmp);
9721
9722 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9723 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9724 IEM_MC_REF_EFLAGS(pEFlags);
9725 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9726 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9727
9728 IEM_MC_ADVANCE_RIP();
9729 IEM_MC_END();
9730 }
9731 else
9732 {
9733 /* memory operand */
9734 IEM_MC_BEGIN(3, 2);
9735 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9736 IEM_MC_ARG(uint32_t, u32Src, 1);
9737 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9738 IEM_MC_LOCAL(uint32_t, u32Tmp);
9739 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9740
9741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9742 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9743 IEM_MC_ASSIGN(u32Src, u32Imm);
9744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9745 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9746 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9747 IEM_MC_REF_EFLAGS(pEFlags);
9748 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9749 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9750
9751 IEM_MC_ADVANCE_RIP();
9752 IEM_MC_END();
9753 }
9754 return VINF_SUCCESS;
9755 }
9756
9757 case IEMMODE_64BIT:
9758 {
9759 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9760 {
9761 /* register operand */
9762 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9764
9765 IEM_MC_BEGIN(3, 1);
9766 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9767 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
9768 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9769 IEM_MC_LOCAL(uint64_t, u64Tmp);
9770
9771 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9772 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9773 IEM_MC_REF_EFLAGS(pEFlags);
9774 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9775 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9776
9777 IEM_MC_ADVANCE_RIP();
9778 IEM_MC_END();
9779 }
9780 else
9781 {
9782 /* memory operand */
9783 IEM_MC_BEGIN(3, 2);
9784 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9785 IEM_MC_ARG(uint64_t, u64Src, 1);
9786 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9787 IEM_MC_LOCAL(uint64_t, u64Tmp);
9788 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9789
9790 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9791 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9792 IEM_MC_ASSIGN(u64Src, u64Imm);
9793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9794 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9795 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9796 IEM_MC_REF_EFLAGS(pEFlags);
9797 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9798 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9799
9800 IEM_MC_ADVANCE_RIP();
9801 IEM_MC_END();
9802 }
9803 return VINF_SUCCESS;
9804 }
9805 }
9806 AssertFailedReturn(VERR_IEM_IPE_9);
9807}
9808
9809
9810/** Opcode 0x6a. */
9811FNIEMOP_DEF(iemOp_push_Ib)
9812{
9813 IEMOP_MNEMONIC(push_Ib, "push Ib");
9814 IEMOP_HLP_MIN_186();
9815 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9817 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9818
9819 IEM_MC_BEGIN(0,0);
9820 switch (pVCpu->iem.s.enmEffOpSize)
9821 {
9822 case IEMMODE_16BIT:
9823 IEM_MC_PUSH_U16(i8Imm);
9824 break;
9825 case IEMMODE_32BIT:
9826 IEM_MC_PUSH_U32(i8Imm);
9827 break;
9828 case IEMMODE_64BIT:
9829 IEM_MC_PUSH_U64(i8Imm);
9830 break;
9831 }
9832 IEM_MC_ADVANCE_RIP();
9833 IEM_MC_END();
9834 return VINF_SUCCESS;
9835}
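

/*
 * Note (informational): the immediate is fetched as int8_t above, so the
 * IEM_MC_PUSH_U16/U32/U64 invocations sign-extend it implicitly; e.g. 6A FF
 * pushes 0xFFFF when the effective operand size is 16-bit.
 */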
9836
9837
9838/** Opcode 0x6b. */
9839FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
9840{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
9842 IEMOP_HLP_MIN_186();
9843 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9844 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
9845
9846 switch (pVCpu->iem.s.enmEffOpSize)
9847 {
9848 case IEMMODE_16BIT:
9849 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9850 {
9851 /* register operand */
9852 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9853 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9854
9855 IEM_MC_BEGIN(3, 1);
9856 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9857 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
9858 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9859 IEM_MC_LOCAL(uint16_t, u16Tmp);
9860
9861 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9862 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9863 IEM_MC_REF_EFLAGS(pEFlags);
9864 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9865 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9866
9867 IEM_MC_ADVANCE_RIP();
9868 IEM_MC_END();
9869 }
9870 else
9871 {
9872 /* memory operand */
9873 IEM_MC_BEGIN(3, 2);
9874 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9875 IEM_MC_ARG(uint16_t, u16Src, 1);
9876 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9877 IEM_MC_LOCAL(uint16_t, u16Tmp);
9878 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9879
9880 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9881 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
9882 IEM_MC_ASSIGN(u16Src, u16Imm);
9883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9884 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9885 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9886 IEM_MC_REF_EFLAGS(pEFlags);
9887 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9888 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9889
9890 IEM_MC_ADVANCE_RIP();
9891 IEM_MC_END();
9892 }
9893 return VINF_SUCCESS;
9894
9895 case IEMMODE_32BIT:
9896 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9897 {
9898 /* register operand */
9899 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9901
9902 IEM_MC_BEGIN(3, 1);
9903 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9904 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
9905 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9906 IEM_MC_LOCAL(uint32_t, u32Tmp);
9907
9908 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9909 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9910 IEM_MC_REF_EFLAGS(pEFlags);
9911 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9912 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9913
9914 IEM_MC_ADVANCE_RIP();
9915 IEM_MC_END();
9916 }
9917 else
9918 {
9919 /* memory operand */
9920 IEM_MC_BEGIN(3, 2);
9921 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9922 IEM_MC_ARG(uint32_t, u32Src, 1);
9923 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9924 IEM_MC_LOCAL(uint32_t, u32Tmp);
9925 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9926
9927 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9928 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
9929 IEM_MC_ASSIGN(u32Src, u32Imm);
9930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9931 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9932 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9933 IEM_MC_REF_EFLAGS(pEFlags);
9934 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9935 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9936
9937 IEM_MC_ADVANCE_RIP();
9938 IEM_MC_END();
9939 }
9940 return VINF_SUCCESS;
9941
9942 case IEMMODE_64BIT:
9943 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9944 {
9945 /* register operand */
9946 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9948
9949 IEM_MC_BEGIN(3, 1);
9950 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9951 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
9952 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9953 IEM_MC_LOCAL(uint64_t, u64Tmp);
9954
9955 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9956 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9957 IEM_MC_REF_EFLAGS(pEFlags);
9958 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9959 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9960
9961 IEM_MC_ADVANCE_RIP();
9962 IEM_MC_END();
9963 }
9964 else
9965 {
9966 /* memory operand */
9967 IEM_MC_BEGIN(3, 2);
9968 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9969 IEM_MC_ARG(uint64_t, u64Src, 1);
9970 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9971 IEM_MC_LOCAL(uint64_t, u64Tmp);
9972 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9973
9974 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9975 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
9976 IEM_MC_ASSIGN(u64Src, u64Imm);
9977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9978 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9979 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9980 IEM_MC_REF_EFLAGS(pEFlags);
9981 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9982 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9983
9984 IEM_MC_ADVANCE_RIP();
9985 IEM_MC_END();
9986 }
9987 return VINF_SUCCESS;
9988 }
9989 AssertFailedReturn(VERR_IEM_IPE_8);
9990}
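

/*
 * Illustration (comment only): 6B C1 F8 is imul eax, ecx, -8; the (int8_t)
 * casts and the S8_SX immediate fetches above sign-extend the byte
 * immediate to the effective operand size before the multiplication.
 */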
9991
9992
9993/** Opcode 0x6c. */
9994FNIEMOP_DEF(iemOp_insb_Yb_DX)
9995{
9996 IEMOP_HLP_MIN_186();
9997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9998 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
9999 {
10000 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
10001 switch (pVCpu->iem.s.enmEffAddrMode)
10002 {
10003 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
10004 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
10005 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
10006 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10007 }
10008 }
10009 else
10010 {
10011 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
10012 switch (pVCpu->iem.s.enmEffAddrMode)
10013 {
10014 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
10015 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
10016 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
10017 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10018 }
10019 }
10020}
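

/*
 * Dispatch example (comment only): F3 6C executed with 16-bit addressing
 * takes the rep branch above and defers to iemCImpl_rep_ins_op8_addr16; the
 * unprefixed 6C form lands in the corresponding iemCImpl_ins_op8_addrNN
 * worker instead.
 */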
10021
10022
10023/** Opcode 0x6d. */
10024FNIEMOP_DEF(iemOp_inswd_Yv_DX)
10025{
10026 IEMOP_HLP_MIN_186();
10027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10028 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
10029 {
10030 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
10031 switch (pVCpu->iem.s.enmEffOpSize)
10032 {
10033 case IEMMODE_16BIT:
10034 switch (pVCpu->iem.s.enmEffAddrMode)
10035 {
10036 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
10037 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
10038 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
10039 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10040 }
10041 break;
10042 case IEMMODE_64BIT:
10043 case IEMMODE_32BIT:
10044 switch (pVCpu->iem.s.enmEffAddrMode)
10045 {
10046 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
10047 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
10048 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
10049 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10050 }
10051 break;
10052 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10053 }
10054 }
10055 else
10056 {
10057 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
10058 switch (pVCpu->iem.s.enmEffOpSize)
10059 {
10060 case IEMMODE_16BIT:
10061 switch (pVCpu->iem.s.enmEffAddrMode)
10062 {
10063 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
10064 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
10065 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
10066 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10067 }
10068 break;
10069 case IEMMODE_64BIT:
10070 case IEMMODE_32BIT:
10071 switch (pVCpu->iem.s.enmEffAddrMode)
10072 {
10073 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
10074 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
10075 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
10076 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10077 }
10078 break;
10079 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10080 }
10081 }
10082}
10083
10084
10085/** Opcode 0x6e. */
10086FNIEMOP_DEF(iemOp_outsb_Yb_DX)
10087{
10088 IEMOP_HLP_MIN_186();
10089 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10090 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
10091 {
10092 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
10093 switch (pVCpu->iem.s.enmEffAddrMode)
10094 {
10095 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
10096 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
10097 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
10098 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10099 }
10100 }
10101 else
10102 {
10103 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
10104 switch (pVCpu->iem.s.enmEffAddrMode)
10105 {
10106 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
10107 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
10108 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
10109 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10110 }
10111 }
10112}
10113
10114
10115/** Opcode 0x6f. */
10116FNIEMOP_DEF(iemOp_outswd_Yv_DX)
10117{
10118 IEMOP_HLP_MIN_186();
10119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10120 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
10121 {
10122 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
10123 switch (pVCpu->iem.s.enmEffOpSize)
10124 {
10125 case IEMMODE_16BIT:
10126 switch (pVCpu->iem.s.enmEffAddrMode)
10127 {
10128 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
10129 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
10130 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
10131 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10132 }
10133 break;
10134 case IEMMODE_64BIT:
10135 case IEMMODE_32BIT:
10136 switch (pVCpu->iem.s.enmEffAddrMode)
10137 {
10138 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
10139 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
10140 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
10141 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10142 }
10143 break;
10144 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10145 }
10146 }
10147 else
10148 {
10149 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
10150 switch (pVCpu->iem.s.enmEffOpSize)
10151 {
10152 case IEMMODE_16BIT:
10153 switch (pVCpu->iem.s.enmEffAddrMode)
10154 {
10155 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
10156 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
10157 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
10158 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10159 }
10160 break;
10161 case IEMMODE_64BIT:
10162 case IEMMODE_32BIT:
10163 switch (pVCpu->iem.s.enmEffAddrMode)
10164 {
10165 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
10166 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
10167 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
10168 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10169 }
10170 break;
10171 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10172 }
10173 }
10174}
10175
10176
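/*
 * The 0x70..0x7f short conditional jumps below come in pairs: each even
 * opcode tests a condition and the odd opcode that follows tests its
 * inverse, so the two handlers differ only in which branch takes the jump.
 * As a rough C sketch of the common shape (illustrative only, not IEM
 * code; fConditionSet is a placeholder):
 *
 *     if (fConditionSet(fEFlags))
 *         rIP += (int8_t)i8Imm;   // taken: sign-extended relative jump
 *     else
 *         rIP += cbInstr;         // not taken: fall through
 */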
10177/** Opcode 0x70. */
10178FNIEMOP_DEF(iemOp_jo_Jb)
10179{
10180 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
10181 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10183 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10184
10185 IEM_MC_BEGIN(0, 0);
10186 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
10187 IEM_MC_REL_JMP_S8(i8Imm);
10188 } IEM_MC_ELSE() {
10189 IEM_MC_ADVANCE_RIP();
10190 } IEM_MC_ENDIF();
10191 IEM_MC_END();
10192 return VINF_SUCCESS;
10193}
10194
10195
10196/** Opcode 0x71. */
10197FNIEMOP_DEF(iemOp_jno_Jb)
10198{
10199 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
10200 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10202 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10203
10204 IEM_MC_BEGIN(0, 0);
10205 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
10206 IEM_MC_ADVANCE_RIP();
10207 } IEM_MC_ELSE() {
10208 IEM_MC_REL_JMP_S8(i8Imm);
10209 } IEM_MC_ENDIF();
10210 IEM_MC_END();
10211 return VINF_SUCCESS;
10212}
10213
10214/** Opcode 0x72. */
10215FNIEMOP_DEF(iemOp_jc_Jb)
10216{
10217 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
10218 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10220 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10221
10222 IEM_MC_BEGIN(0, 0);
10223 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
10224 IEM_MC_REL_JMP_S8(i8Imm);
10225 } IEM_MC_ELSE() {
10226 IEM_MC_ADVANCE_RIP();
10227 } IEM_MC_ENDIF();
10228 IEM_MC_END();
10229 return VINF_SUCCESS;
10230}
10231
10232
10233/** Opcode 0x73. */
10234FNIEMOP_DEF(iemOp_jnc_Jb)
10235{
10236 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
10237 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10239 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10240
10241 IEM_MC_BEGIN(0, 0);
10242 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
10243 IEM_MC_ADVANCE_RIP();
10244 } IEM_MC_ELSE() {
10245 IEM_MC_REL_JMP_S8(i8Imm);
10246 } IEM_MC_ENDIF();
10247 IEM_MC_END();
10248 return VINF_SUCCESS;
10249}
10250
10251
10252/** Opcode 0x74. */
10253FNIEMOP_DEF(iemOp_je_Jb)
10254{
10255 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
10256 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10257 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10258 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10259
10260 IEM_MC_BEGIN(0, 0);
10261 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10262 IEM_MC_REL_JMP_S8(i8Imm);
10263 } IEM_MC_ELSE() {
10264 IEM_MC_ADVANCE_RIP();
10265 } IEM_MC_ENDIF();
10266 IEM_MC_END();
10267 return VINF_SUCCESS;
10268}
10269
10270
10271/** Opcode 0x75. */
10272FNIEMOP_DEF(iemOp_jne_Jb)
10273{
10274 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
10275 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10276 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10277 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10278
10279 IEM_MC_BEGIN(0, 0);
10280 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10281 IEM_MC_ADVANCE_RIP();
10282 } IEM_MC_ELSE() {
10283 IEM_MC_REL_JMP_S8(i8Imm);
10284 } IEM_MC_ENDIF();
10285 IEM_MC_END();
10286 return VINF_SUCCESS;
10287}
10288
10289
10290/** Opcode 0x76. */
10291FNIEMOP_DEF(iemOp_jbe_Jb)
10292{
10293 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
10294 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10296 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10297
10298 IEM_MC_BEGIN(0, 0);
10299 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
10300 IEM_MC_REL_JMP_S8(i8Imm);
10301 } IEM_MC_ELSE() {
10302 IEM_MC_ADVANCE_RIP();
10303 } IEM_MC_ENDIF();
10304 IEM_MC_END();
10305 return VINF_SUCCESS;
10306}
10307
10308
10309/** Opcode 0x77. */
10310FNIEMOP_DEF(iemOp_jnbe_Jb)
10311{
10312 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
10313 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10314 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10315 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10316
10317 IEM_MC_BEGIN(0, 0);
10318 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
10319 IEM_MC_ADVANCE_RIP();
10320 } IEM_MC_ELSE() {
10321 IEM_MC_REL_JMP_S8(i8Imm);
10322 } IEM_MC_ENDIF();
10323 IEM_MC_END();
10324 return VINF_SUCCESS;
10325}
10326
10327
10328/** Opcode 0x78. */
10329FNIEMOP_DEF(iemOp_js_Jb)
10330{
10331 IEMOP_MNEMONIC(js_Jb, "js Jb");
10332 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10334 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10335
10336 IEM_MC_BEGIN(0, 0);
10337 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
10338 IEM_MC_REL_JMP_S8(i8Imm);
10339 } IEM_MC_ELSE() {
10340 IEM_MC_ADVANCE_RIP();
10341 } IEM_MC_ENDIF();
10342 IEM_MC_END();
10343 return VINF_SUCCESS;
10344}
10345
10346
10347/** Opcode 0x79. */
10348FNIEMOP_DEF(iemOp_jns_Jb)
10349{
10350 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
10351 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10353 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10354
10355 IEM_MC_BEGIN(0, 0);
10356 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
10357 IEM_MC_ADVANCE_RIP();
10358 } IEM_MC_ELSE() {
10359 IEM_MC_REL_JMP_S8(i8Imm);
10360 } IEM_MC_ENDIF();
10361 IEM_MC_END();
10362 return VINF_SUCCESS;
10363}
10364
10365
10366/** Opcode 0x7a. */
10367FNIEMOP_DEF(iemOp_jp_Jb)
10368{
10369 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
10370 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10372 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10373
10374 IEM_MC_BEGIN(0, 0);
10375 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
10376 IEM_MC_REL_JMP_S8(i8Imm);
10377 } IEM_MC_ELSE() {
10378 IEM_MC_ADVANCE_RIP();
10379 } IEM_MC_ENDIF();
10380 IEM_MC_END();
10381 return VINF_SUCCESS;
10382}
10383
10384
10385/** Opcode 0x7b. */
10386FNIEMOP_DEF(iemOp_jnp_Jb)
10387{
10388 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
10389 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10390 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10391 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10392
10393 IEM_MC_BEGIN(0, 0);
10394 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
10395 IEM_MC_ADVANCE_RIP();
10396 } IEM_MC_ELSE() {
10397 IEM_MC_REL_JMP_S8(i8Imm);
10398 } IEM_MC_ENDIF();
10399 IEM_MC_END();
10400 return VINF_SUCCESS;
10401}
10402
10403
10404/** Opcode 0x7c. */
10405FNIEMOP_DEF(iemOp_jl_Jb)
10406{
10407 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
10408 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10409 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10410 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10411
10412 IEM_MC_BEGIN(0, 0);
10413 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
10414 IEM_MC_REL_JMP_S8(i8Imm);
10415 } IEM_MC_ELSE() {
10416 IEM_MC_ADVANCE_RIP();
10417 } IEM_MC_ENDIF();
10418 IEM_MC_END();
10419 return VINF_SUCCESS;
10420}
10421
10422
10423/** Opcode 0x7d. */
10424FNIEMOP_DEF(iemOp_jnl_Jb)
10425{
10426 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
10427 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10429 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10430
10431 IEM_MC_BEGIN(0, 0);
10432 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
10433 IEM_MC_ADVANCE_RIP();
10434 } IEM_MC_ELSE() {
10435 IEM_MC_REL_JMP_S8(i8Imm);
10436 } IEM_MC_ENDIF();
10437 IEM_MC_END();
10438 return VINF_SUCCESS;
10439}
10440
10441
10442/** Opcode 0x7e. */
10443FNIEMOP_DEF(iemOp_jle_Jb)
10444{
10445 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
10446 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10448 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10449
10450 IEM_MC_BEGIN(0, 0);
10451 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
10452 IEM_MC_REL_JMP_S8(i8Imm);
10453 } IEM_MC_ELSE() {
10454 IEM_MC_ADVANCE_RIP();
10455 } IEM_MC_ENDIF();
10456 IEM_MC_END();
10457 return VINF_SUCCESS;
10458}
10459
10460
10461/** Opcode 0x7f. */
10462FNIEMOP_DEF(iemOp_jnle_Jb)
10463{
10464 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
10465 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10466 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10467 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10468
10469 IEM_MC_BEGIN(0, 0);
10470 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
10471 IEM_MC_ADVANCE_RIP();
10472 } IEM_MC_ELSE() {
10473 IEM_MC_REL_JMP_S8(i8Imm);
10474 } IEM_MC_ENDIF();
10475 IEM_MC_END();
10476 return VINF_SUCCESS;
10477}
10478
10479
10480/** Opcode 0x80. */
10481FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
10482{
10483 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10484 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10485 {
10486 case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
10487 case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
10488 case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
10489 case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
10490 case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
10491 case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
10492 case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
10493 case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
10494 }
10495 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
10496
10497 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10498 {
10499 /* register target */
10500 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10502 IEM_MC_BEGIN(3, 0);
10503 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10504 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
10505 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10506
10507 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10508 IEM_MC_REF_EFLAGS(pEFlags);
10509 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
10510
10511 IEM_MC_ADVANCE_RIP();
10512 IEM_MC_END();
10513 }
10514 else
10515 {
10516 /* memory target */
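        /* CMP is the only group 1 operation that never writes its
           destination, so it is also the only one without a locked variant;
           a NULL pfnLockedU8 thus doubles as a read-only indicator here. */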
10517 uint32_t fAccess;
10518 if (pImpl->pfnLockedU8)
10519 fAccess = IEM_ACCESS_DATA_RW;
10520 else /* CMP */
10521 fAccess = IEM_ACCESS_DATA_R;
10522 IEM_MC_BEGIN(3, 2);
10523 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10524 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10525 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10526
10527 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10528 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10529 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
10530 if (pImpl->pfnLockedU8)
10531 IEMOP_HLP_DONE_DECODING();
10532 else
10533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10534
10535 IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10536 IEM_MC_FETCH_EFLAGS(EFlags);
10537 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10538 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
10539 else
10540 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
10541
10542 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
10543 IEM_MC_COMMIT_EFLAGS(EFlags);
10544 IEM_MC_ADVANCE_RIP();
10545 IEM_MC_END();
10546 }
10547 return VINF_SUCCESS;
10548}
10549
10550
10551/** Opcode 0x81. */
10552FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
10553{
10554 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10555 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10556 {
10557 case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
10558 case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
10559 case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
10560 case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
10561 case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
10562 case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
10563 case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
10564 case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
10565 }
10566 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
10567
10568 switch (pVCpu->iem.s.enmEffOpSize)
10569 {
10570 case IEMMODE_16BIT:
10571 {
10572 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10573 {
10574 /* register target */
10575 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10577 IEM_MC_BEGIN(3, 0);
10578 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10579 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
10580 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10581
10582 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10583 IEM_MC_REF_EFLAGS(pEFlags);
10584 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10585
10586 IEM_MC_ADVANCE_RIP();
10587 IEM_MC_END();
10588 }
10589 else
10590 {
10591 /* memory target */
10592 uint32_t fAccess;
10593 if (pImpl->pfnLockedU16)
10594 fAccess = IEM_ACCESS_DATA_RW;
10595 else /* CMP, TEST */
10596 fAccess = IEM_ACCESS_DATA_R;
10597 IEM_MC_BEGIN(3, 2);
10598 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10599 IEM_MC_ARG(uint16_t, u16Src, 1);
10600 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10601 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10602
10603 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
10604 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10605 IEM_MC_ASSIGN(u16Src, u16Imm);
10606 if (pImpl->pfnLockedU16)
10607 IEMOP_HLP_DONE_DECODING();
10608 else
10609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10610 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10611 IEM_MC_FETCH_EFLAGS(EFlags);
10612 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10613 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10614 else
10615 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
10616
10617 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
10618 IEM_MC_COMMIT_EFLAGS(EFlags);
10619 IEM_MC_ADVANCE_RIP();
10620 IEM_MC_END();
10621 }
10622 break;
10623 }
10624
10625 case IEMMODE_32BIT:
10626 {
10627 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10628 {
10629 /* register target */
10630 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10632 IEM_MC_BEGIN(3, 0);
10633 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10634 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
10635 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10636
10637 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10638 IEM_MC_REF_EFLAGS(pEFlags);
10639 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10640 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10641
10642 IEM_MC_ADVANCE_RIP();
10643 IEM_MC_END();
10644 }
10645 else
10646 {
10647 /* memory target */
10648 uint32_t fAccess;
10649 if (pImpl->pfnLockedU32)
10650 fAccess = IEM_ACCESS_DATA_RW;
10651 else /* CMP, TEST */
10652 fAccess = IEM_ACCESS_DATA_R;
10653 IEM_MC_BEGIN(3, 2);
10654 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10655 IEM_MC_ARG(uint32_t, u32Src, 1);
10656 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10657 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10658
10659 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
10660 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10661 IEM_MC_ASSIGN(u32Src, u32Imm);
10662 if (pImpl->pfnLockedU32)
10663 IEMOP_HLP_DONE_DECODING();
10664 else
10665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10666 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10667 IEM_MC_FETCH_EFLAGS(EFlags);
10668 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10669 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10670 else
10671 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
10672
10673 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
10674 IEM_MC_COMMIT_EFLAGS(EFlags);
10675 IEM_MC_ADVANCE_RIP();
10676 IEM_MC_END();
10677 }
10678 break;
10679 }
10680
10681 case IEMMODE_64BIT:
10682 {
10683 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10684 {
10685 /* register target */
10686 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10688 IEM_MC_BEGIN(3, 0);
10689 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10690 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
10691 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10692
10693 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10694 IEM_MC_REF_EFLAGS(pEFlags);
10695 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10696
10697 IEM_MC_ADVANCE_RIP();
10698 IEM_MC_END();
10699 }
10700 else
10701 {
10702 /* memory target */
10703 uint32_t fAccess;
10704 if (pImpl->pfnLockedU64)
10705 fAccess = IEM_ACCESS_DATA_RW;
10706 else /* CMP */
10707 fAccess = IEM_ACCESS_DATA_R;
10708 IEM_MC_BEGIN(3, 2);
10709 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10710 IEM_MC_ARG(uint64_t, u64Src, 1);
10711 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10712 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10713
10714 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
10715 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10716 if (pImpl->pfnLockedU64)
10717 IEMOP_HLP_DONE_DECODING();
10718 else
10719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10720 IEM_MC_ASSIGN(u64Src, u64Imm);
10721 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10722 IEM_MC_FETCH_EFLAGS(EFlags);
10723 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10724 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10725 else
10726 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
10727
10728 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
10729 IEM_MC_COMMIT_EFLAGS(EFlags);
10730 IEM_MC_ADVANCE_RIP();
10731 IEM_MC_END();
10732 }
10733 break;
10734 }
10735 }
10736 return VINF_SUCCESS;
10737}
10738
10739
10740/** Opcode 0x82. */
10741FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
10742{
10743 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
10744 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
10745}
10746
10747
10748/** Opcode 0x83. */
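/*
 * For opcode 0x83 the imm8 is sign-extended to the effective operand size,
 * which is what the (int8_t) casts below implement.  E.g. 83 C3 FF with a
 * 32-bit operand size is 'add ebx, 0ffffffffh', i.e. ebx -= 1, making this
 * the compact encoding for small immediates.
 */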
10749FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
10750{
10751 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10752 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10753 {
10754 case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
10755 case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
10756 case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
10757 case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
10758 case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
10759 case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
10760 case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
10761 case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
10762 }
10763 /* Note! The OR, AND and XOR forms appear to be present on CPUs prior to
10764 the 386, even though they are absent from the Intel reference manuals
10765 and some 3rd party opcode listings. */
10766 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
10767
10768 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10769 {
10770 /*
10771 * Register target
10772 */
10773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10774 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10775 switch (pVCpu->iem.s.enmEffOpSize)
10776 {
10777 case IEMMODE_16BIT:
10778 {
10779 IEM_MC_BEGIN(3, 0);
10780 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10781 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
10782 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10783
10784 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10785 IEM_MC_REF_EFLAGS(pEFlags);
10786 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10787
10788 IEM_MC_ADVANCE_RIP();
10789 IEM_MC_END();
10790 break;
10791 }
10792
10793 case IEMMODE_32BIT:
10794 {
10795 IEM_MC_BEGIN(3, 0);
10796 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10797 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
10798 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10799
10800 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10801 IEM_MC_REF_EFLAGS(pEFlags);
10802 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10803 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10804
10805 IEM_MC_ADVANCE_RIP();
10806 IEM_MC_END();
10807 break;
10808 }
10809
10810 case IEMMODE_64BIT:
10811 {
10812 IEM_MC_BEGIN(3, 0);
10813 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10814 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
10815 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10816
10817 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10818 IEM_MC_REF_EFLAGS(pEFlags);
10819 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10820
10821 IEM_MC_ADVANCE_RIP();
10822 IEM_MC_END();
10823 break;
10824 }
10825 }
10826 }
10827 else
10828 {
10829 /*
10830 * Memory target.
10831 */
10832 uint32_t fAccess;
10833 if (pImpl->pfnLockedU16)
10834 fAccess = IEM_ACCESS_DATA_RW;
10835 else /* CMP */
10836 fAccess = IEM_ACCESS_DATA_R;
10837
10838 switch (pVCpu->iem.s.enmEffOpSize)
10839 {
10840 case IEMMODE_16BIT:
10841 {
10842 IEM_MC_BEGIN(3, 2);
10843 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10844 IEM_MC_ARG(uint16_t, u16Src, 1);
10845 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10846 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10847
10848 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10849 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10850 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
10851 if (pImpl->pfnLockedU16)
10852 IEMOP_HLP_DONE_DECODING();
10853 else
10854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10855 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10856 IEM_MC_FETCH_EFLAGS(EFlags);
10857 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10858 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10859 else
10860 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
10861
10862 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
10863 IEM_MC_COMMIT_EFLAGS(EFlags);
10864 IEM_MC_ADVANCE_RIP();
10865 IEM_MC_END();
10866 break;
10867 }
10868
10869 case IEMMODE_32BIT:
10870 {
10871 IEM_MC_BEGIN(3, 2);
10872 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10873 IEM_MC_ARG(uint32_t, u32Src, 1);
10874 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10875 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10876
10877 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10878 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10879 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
10880 if (pImpl->pfnLockedU32)
10881 IEMOP_HLP_DONE_DECODING();
10882 else
10883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10884 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10885 IEM_MC_FETCH_EFLAGS(EFlags);
10886 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10887 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10888 else
10889 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
10890
10891 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
10892 IEM_MC_COMMIT_EFLAGS(EFlags);
10893 IEM_MC_ADVANCE_RIP();
10894 IEM_MC_END();
10895 break;
10896 }
10897
10898 case IEMMODE_64BIT:
10899 {
10900 IEM_MC_BEGIN(3, 2);
10901 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10902 IEM_MC_ARG(uint64_t, u64Src, 1);
10903 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10904 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10905
10906 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10907 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10908 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
10909 if (pImpl->pfnLockedU64)
10910 IEMOP_HLP_DONE_DECODING();
10911 else
10912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10913 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10914 IEM_MC_FETCH_EFLAGS(EFlags);
10915 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10916 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10917 else
10918 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
10919
10920 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
10921 IEM_MC_COMMIT_EFLAGS(EFlags);
10922 IEM_MC_ADVANCE_RIP();
10923 IEM_MC_END();
10924 break;
10925 }
10926 }
10927 }
10928 return VINF_SUCCESS;
10929}
10930
10931
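/*
 * The Intel manuals leave AF undefined after TEST, hence the
 * IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF) calls below, which
 * presumably tell the verification mode not to compare that flag.
 */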
10932/** Opcode 0x84. */
10933FNIEMOP_DEF(iemOp_test_Eb_Gb)
10934{
10935 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
10936 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
10937 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
10938}
10939
10940
10941/** Opcode 0x85. */
10942FNIEMOP_DEF(iemOp_test_Ev_Gv)
10943{
10944 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
10945 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
10946 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
10947}
10948
10949
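/*
 * Note that XCHG with a memory operand asserts LOCK# implicitly on real
 * hardware whether or not a LOCK prefix is present, which is presumably why
 * the memory paths below map the operand read-write without checking for
 * the prefix.
 */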
10950/** Opcode 0x86. */
10951FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
10952{
10953 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10954 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
10955
10956 /*
10957 * If rm is denoting a register, no more instruction bytes.
10958 */
10959 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10960 {
10961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10962
10963 IEM_MC_BEGIN(0, 2);
10964 IEM_MC_LOCAL(uint8_t, uTmp1);
10965 IEM_MC_LOCAL(uint8_t, uTmp2);
10966
10967 IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10968 IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10969 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
10970 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
10971
10972 IEM_MC_ADVANCE_RIP();
10973 IEM_MC_END();
10974 }
10975 else
10976 {
10977 /*
10978 * We're accessing memory.
10979 */
10980/** @todo the register must be committed separately! */
10981 IEM_MC_BEGIN(2, 2);
10982 IEM_MC_ARG(uint8_t *, pu8Mem, 0);
10983 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
10984 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10985
10986 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10987 IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10988 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10989 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
10990 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);
10991
10992 IEM_MC_ADVANCE_RIP();
10993 IEM_MC_END();
10994 }
10995 return VINF_SUCCESS;
10996}
10997
10998
10999/** Opcode 0x87. */
11000FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
11001{
11002 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
11003 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11004
11005 /*
11006 * If rm is denoting a register, no more instruction bytes.
11007 */
11008 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11009 {
11010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11011
11012 switch (pVCpu->iem.s.enmEffOpSize)
11013 {
11014 case IEMMODE_16BIT:
11015 IEM_MC_BEGIN(0, 2);
11016 IEM_MC_LOCAL(uint16_t, uTmp1);
11017 IEM_MC_LOCAL(uint16_t, uTmp2);
11018
11019 IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11020 IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11021 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11022 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11023
11024 IEM_MC_ADVANCE_RIP();
11025 IEM_MC_END();
11026 return VINF_SUCCESS;
11027
11028 case IEMMODE_32BIT:
11029 IEM_MC_BEGIN(0, 2);
11030 IEM_MC_LOCAL(uint32_t, uTmp1);
11031 IEM_MC_LOCAL(uint32_t, uTmp2);
11032
11033 IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11034 IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11035 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11036 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11037
11038 IEM_MC_ADVANCE_RIP();
11039 IEM_MC_END();
11040 return VINF_SUCCESS;
11041
11042 case IEMMODE_64BIT:
11043 IEM_MC_BEGIN(0, 2);
11044 IEM_MC_LOCAL(uint64_t, uTmp1);
11045 IEM_MC_LOCAL(uint64_t, uTmp2);
11046
11047 IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11048 IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11049 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11050 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11051
11052 IEM_MC_ADVANCE_RIP();
11053 IEM_MC_END();
11054 return VINF_SUCCESS;
11055
11056 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11057 }
11058 }
11059 else
11060 {
11061 /*
11062 * We're accessing memory.
11063 */
11064 switch (pVCpu->iem.s.enmEffOpSize)
11065 {
11066/** @todo the register must be committed separately! */
11067 case IEMMODE_16BIT:
11068 IEM_MC_BEGIN(2, 2);
11069 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
11070 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11071 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11072
11073 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11074 IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11075 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11076 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
11077 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
11078
11079 IEM_MC_ADVANCE_RIP();
11080 IEM_MC_END();
11081 return VINF_SUCCESS;
11082
11083 case IEMMODE_32BIT:
11084 IEM_MC_BEGIN(2, 2);
11085 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
11086 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11087 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11088
11089 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11090 IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11091 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11092 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
11093 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
11094
11095 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
11096 IEM_MC_ADVANCE_RIP();
11097 IEM_MC_END();
11098 return VINF_SUCCESS;
11099
11100 case IEMMODE_64BIT:
11101 IEM_MC_BEGIN(2, 2);
11102 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
11103 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11104 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11105
11106 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11107 IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11108 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11109 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
11110 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
11111
11112 IEM_MC_ADVANCE_RIP();
11113 IEM_MC_END();
11114 return VINF_SUCCESS;
11115
11116 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11117 }
11118 }
11119}
11120
11121
11122/** Opcode 0x88. */
11123FNIEMOP_DEF(iemOp_mov_Eb_Gb)
11124{
11125 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
11126
11127 uint8_t bRm;
11128 IEM_OPCODE_GET_NEXT_U8(&bRm);
11129
11130 /*
11131 * If rm is denoting a register, no more instruction bytes.
11132 */
11133 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11134 {
11135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11136 IEM_MC_BEGIN(0, 1);
11137 IEM_MC_LOCAL(uint8_t, u8Value);
11138 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11139 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
11140 IEM_MC_ADVANCE_RIP();
11141 IEM_MC_END();
11142 }
11143 else
11144 {
11145 /*
11146 * We're writing a register to memory.
11147 */
11148 IEM_MC_BEGIN(0, 2);
11149 IEM_MC_LOCAL(uint8_t, u8Value);
11150 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11151 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11153 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11154 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
11155 IEM_MC_ADVANCE_RIP();
11156 IEM_MC_END();
11157 }
11158 return VINF_SUCCESS;
11160}
11161
11162
11163/** Opcode 0x89. */
11164FNIEMOP_DEF(iemOp_mov_Ev_Gv)
11165{
11166 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
11167
11168 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11169
11170 /*
11171 * If rm is denoting a register, no more instruction bytes.
11172 */
11173 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11174 {
11175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11176 switch (pVCpu->iem.s.enmEffOpSize)
11177 {
11178 case IEMMODE_16BIT:
11179 IEM_MC_BEGIN(0, 1);
11180 IEM_MC_LOCAL(uint16_t, u16Value);
11181 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11182 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
11183 IEM_MC_ADVANCE_RIP();
11184 IEM_MC_END();
11185 break;
11186
11187 case IEMMODE_32BIT:
11188 IEM_MC_BEGIN(0, 1);
11189 IEM_MC_LOCAL(uint32_t, u32Value);
11190 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11191 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
11192 IEM_MC_ADVANCE_RIP();
11193 IEM_MC_END();
11194 break;
11195
11196 case IEMMODE_64BIT:
11197 IEM_MC_BEGIN(0, 1);
11198 IEM_MC_LOCAL(uint64_t, u64Value);
11199 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11200 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
11201 IEM_MC_ADVANCE_RIP();
11202 IEM_MC_END();
11203 break;
11204 }
11205 }
11206 else
11207 {
11208 /*
11209 * We're writing a register to memory.
11210 */
11211 switch (pVCpu->iem.s.enmEffOpSize)
11212 {
11213 case IEMMODE_16BIT:
11214 IEM_MC_BEGIN(0, 2);
11215 IEM_MC_LOCAL(uint16_t, u16Value);
11216 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11217 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11219 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11220 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
11221 IEM_MC_ADVANCE_RIP();
11222 IEM_MC_END();
11223 break;
11224
11225 case IEMMODE_32BIT:
11226 IEM_MC_BEGIN(0, 2);
11227 IEM_MC_LOCAL(uint32_t, u32Value);
11228 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11229 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11231 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11232 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
11233 IEM_MC_ADVANCE_RIP();
11234 IEM_MC_END();
11235 break;
11236
11237 case IEMMODE_64BIT:
11238 IEM_MC_BEGIN(0, 2);
11239 IEM_MC_LOCAL(uint64_t, u64Value);
11240 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11241 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11243 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11244 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
11245 IEM_MC_ADVANCE_RIP();
11246 IEM_MC_END();
11247 break;
11248 }
11249 }
11250 return VINF_SUCCESS;
11251}
11252
11253
11254/** Opcode 0x8a. */
11255FNIEMOP_DEF(iemOp_mov_Gb_Eb)
11256{
11257 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
11258
11259 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11260
11261 /*
11262 * If rm is denoting a register, no more instruction bytes.
11263 */
11264 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11265 {
11266 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11267 IEM_MC_BEGIN(0, 1);
11268 IEM_MC_LOCAL(uint8_t, u8Value);
11269 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11270 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
11271 IEM_MC_ADVANCE_RIP();
11272 IEM_MC_END();
11273 }
11274 else
11275 {
11276 /*
11277 * We're loading a register from memory.
11278 */
11279 IEM_MC_BEGIN(0, 2);
11280 IEM_MC_LOCAL(uint8_t, u8Value);
11281 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11282 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11284 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11285 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
11286 IEM_MC_ADVANCE_RIP();
11287 IEM_MC_END();
11288 }
11289 return VINF_SUCCESS;
11290}
11291
11292
11293/** Opcode 0x8b. */
11294FNIEMOP_DEF(iemOp_mov_Gv_Ev)
11295{
11296 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
11297
11298 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11299
11300 /*
11301 * If rm is denoting a register, no more instruction bytes.
11302 */
11303 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11304 {
11305 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11306 switch (pVCpu->iem.s.enmEffOpSize)
11307 {
11308 case IEMMODE_16BIT:
11309 IEM_MC_BEGIN(0, 1);
11310 IEM_MC_LOCAL(uint16_t, u16Value);
11311 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11312 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
11313 IEM_MC_ADVANCE_RIP();
11314 IEM_MC_END();
11315 break;
11316
11317 case IEMMODE_32BIT:
11318 IEM_MC_BEGIN(0, 1);
11319 IEM_MC_LOCAL(uint32_t, u32Value);
11320 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11321 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
11322 IEM_MC_ADVANCE_RIP();
11323 IEM_MC_END();
11324 break;
11325
11326 case IEMMODE_64BIT:
11327 IEM_MC_BEGIN(0, 1);
11328 IEM_MC_LOCAL(uint64_t, u64Value);
11329 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11330 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
11331 IEM_MC_ADVANCE_RIP();
11332 IEM_MC_END();
11333 break;
11334 }
11335 }
11336 else
11337 {
11338 /*
11339 * We're loading a register from memory.
11340 */
11341 switch (pVCpu->iem.s.enmEffOpSize)
11342 {
11343 case IEMMODE_16BIT:
11344 IEM_MC_BEGIN(0, 2);
11345 IEM_MC_LOCAL(uint16_t, u16Value);
11346 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11349 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11350 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
11351 IEM_MC_ADVANCE_RIP();
11352 IEM_MC_END();
11353 break;
11354
11355 case IEMMODE_32BIT:
11356 IEM_MC_BEGIN(0, 2);
11357 IEM_MC_LOCAL(uint32_t, u32Value);
11358 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11359 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11361 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11362 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
11363 IEM_MC_ADVANCE_RIP();
11364 IEM_MC_END();
11365 break;
11366
11367 case IEMMODE_64BIT:
11368 IEM_MC_BEGIN(0, 2);
11369 IEM_MC_LOCAL(uint64_t, u64Value);
11370 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11371 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11373 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11374 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
11375 IEM_MC_ADVANCE_RIP();
11376 IEM_MC_END();
11377 break;
11378 }
11379 }
11380 return VINF_SUCCESS;
11381}
11382
11383
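/*
 * This one lives out of opcode order because it forwards to iemOp_mov_Gv_Ev
 * above: 0x63 is ARPL outside 64-bit mode and MOVSXD in 64-bit mode, where
 * it degenerates into a plain 32-bit mov unless REX.W is present.
 */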
11384/** Opcode 0x63. */
11385FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
11386{
11387 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
11388 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
11389 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11390 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
11391 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
11392}
11393
11394
11395/** Opcode 0x8c. */
11396FNIEMOP_DEF(iemOp_mov_Ev_Sw)
11397{
11398 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
11399
11400 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11401
11402 /*
11403 * Check that the destination register exists. The REX.R prefix is ignored.
11404 */
11405 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
11406 if ( iSegReg > X86_SREG_GS)
11407 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
11408
11409 /*
11410 * If rm is denoting a register, no more instruction bytes.
11411 * In that case, the operand size is respected and the upper bits are
11412 * cleared (starting with some Pentium models).
11413 */
11414 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11415 {
11416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11417 switch (pVCpu->iem.s.enmEffOpSize)
11418 {
11419 case IEMMODE_16BIT:
11420 IEM_MC_BEGIN(0, 1);
11421 IEM_MC_LOCAL(uint16_t, u16Value);
11422 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
11423 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
11424 IEM_MC_ADVANCE_RIP();
11425 IEM_MC_END();
11426 break;
11427
11428 case IEMMODE_32BIT:
11429 IEM_MC_BEGIN(0, 1);
11430 IEM_MC_LOCAL(uint32_t, u32Value);
11431 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
11432 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
11433 IEM_MC_ADVANCE_RIP();
11434 IEM_MC_END();
11435 break;
11436
11437 case IEMMODE_64BIT:
11438 IEM_MC_BEGIN(0, 1);
11439 IEM_MC_LOCAL(uint64_t, u64Value);
11440 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
11441 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
11442 IEM_MC_ADVANCE_RIP();
11443 IEM_MC_END();
11444 break;
11445 }
11446 }
11447 else
11448 {
11449 /*
11450 * We're saving the register to memory. The access is word sized
11451 * regardless of operand size prefixes.
11452 */
11453#if 0 /* not necessary */
11454 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
11455#endif
11456 IEM_MC_BEGIN(0, 2);
11457 IEM_MC_LOCAL(uint16_t, u16Value);
11458 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11459 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11461 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
11462 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
11463 IEM_MC_ADVANCE_RIP();
11464 IEM_MC_END();
11465 }
11466 return VINF_SUCCESS;
11467}
11468
11469
11470
11471
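/*
 * LEA only evaluates the address calculation and never touches memory, so
 * the register form (mod=3) has no defined meaning and raises #UD below.
 * With a 16-bit or 32-bit operand size the computed address is truncated,
 * which is what the IEM_MC_ASSIGN_TO_SMALLER casts take care of.
 */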
11472/** Opcode 0x8d. */
11473FNIEMOP_DEF(iemOp_lea_Gv_M)
11474{
11475 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
11476 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11477 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11478 return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */
11479
11480 switch (pVCpu->iem.s.enmEffOpSize)
11481 {
11482 case IEMMODE_16BIT:
11483 IEM_MC_BEGIN(0, 2);
11484 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11485 IEM_MC_LOCAL(uint16_t, u16Cast);
11486 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11487 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11488 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
11489 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
11490 IEM_MC_ADVANCE_RIP();
11491 IEM_MC_END();
11492 return VINF_SUCCESS;
11493
11494 case IEMMODE_32BIT:
11495 IEM_MC_BEGIN(0, 2);
11496 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11497 IEM_MC_LOCAL(uint32_t, u32Cast);
11498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11500 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
11501 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
11502 IEM_MC_ADVANCE_RIP();
11503 IEM_MC_END();
11504 return VINF_SUCCESS;
11505
11506 case IEMMODE_64BIT:
11507 IEM_MC_BEGIN(0, 1);
11508 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11509 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11510 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11511 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
11512 IEM_MC_ADVANCE_RIP();
11513 IEM_MC_END();
11514 return VINF_SUCCESS;
11515 }
11516 AssertFailedReturn(VERR_IEM_IPE_7);
11517}
11518
11519
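/*
 * Loading CS via mov Sw,Ev is invalid (a far jump or call must be used
 * instead), which the iSegReg check below enforces.  As a decoding example,
 * 8E D8 is 'mov ds, ax'.
 */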
11520/** Opcode 0x8e. */
11521FNIEMOP_DEF(iemOp_mov_Sw_Ev)
11522{
11523 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
11524
11525 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11526
11527 /*
11528 * The practical operand size is 16-bit.
11529 */
11530#if 0 /* not necessary */
11531 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
11532#endif
11533
11534 /*
11535 * Check that the destination register exists and can be used with this
11536 * instruction. The REX.R prefix is ignored.
11537 */
11538 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
11539 if ( iSegReg == X86_SREG_CS
11540 || iSegReg > X86_SREG_GS)
11541 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
11542
11543 /*
11544 * If rm is denoting a register, no more instruction bytes.
11545 */
11546 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11547 {
11548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11549 IEM_MC_BEGIN(2, 0);
11550 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
11551 IEM_MC_ARG(uint16_t, u16Value, 1);
11552 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11553 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
11554 IEM_MC_END();
11555 }
11556 else
11557 {
11558 /*
11559 * We're loading the register from memory. The access is word sized
11560 * regardless of operand size prefixes.
11561 */
11562 IEM_MC_BEGIN(2, 1);
11563 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
11564 IEM_MC_ARG(uint16_t, u16Value, 1);
11565 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11566 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11568 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11569 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
11570 IEM_MC_END();
11571 }
11572 return VINF_SUCCESS;
11573}
11574
11575
11576/** Opcode 0x8f /0. */
11577FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
11578{
11579 /* This bugger is rather annoying as it requires rSP to be updated before
11580 doing the effective address calculations. Will eventually require a
11581 split between the R/M+SIB decoding and the effective address
11582 calculation - which is something that is required for any attempt at
11583 reusing this code for a recompiler. It may also be good to have if we
11584 need to delay #UD exception caused by invalid lock prefixes.
11585
11586 For now, we'll do a mostly safe interpreter-only implementation here. */
11587 /** @todo What's the deal with the 'reg' field and pop Ev? Ignoring it for
11588 * now until tests show it's checked. */
11589 IEMOP_MNEMONIC(pop_Ev, "pop Ev");
11590
11591 /* Register access is relatively easy and can share code. */
11592 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11593 return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11594
11595 /*
11596 * Memory target.
11597 *
11598 * Intel says that RSP is incremented before it's used in any effective
11599 * address calculations. This means some serious extra annoyance here since
11600 * we decode and calculate the effective address in one step and like to
11601 * delay committing registers till everything is done.
11602 *
11603 * So, we'll decode and calculate the effective address twice. This will
11604 * require some recoding if turned into a recompiler.
11605 */
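    /* A concrete example of the Intel rule above: in 64-bit mode
       'pop qword [rsp]' first does RSP += 8 and then stores the popped
       value at the new RSP, i.e. one slot above where it was popped from. */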
11606 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */
11607
11608#ifndef TST_IEM_CHECK_MC
11609 /* Calc effective address with modified ESP. */
11610/** @todo testcase */
11611 PCPUMCTX pCtx = IEM_GET_CTX(pVCpu);
11612 RTGCPTR GCPtrEff;
11613 VBOXSTRICTRC rcStrict;
11614 switch (pVCpu->iem.s.enmEffOpSize)
11615 {
11616 case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
11617 case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
11618 case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
11619 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11620 }
11621 if (rcStrict != VINF_SUCCESS)
11622 return rcStrict;
11623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11624
11625 /* Perform the operation - this should be CImpl. */
11626 RTUINT64U TmpRsp;
11627 TmpRsp.u = pCtx->rsp;
11628 switch (pVCpu->iem.s.enmEffOpSize)
11629 {
11630 case IEMMODE_16BIT:
11631 {
11632 uint16_t u16Value;
11633 rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
11634 if (rcStrict == VINF_SUCCESS)
11635 rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
11636 break;
11637 }
11638
11639 case IEMMODE_32BIT:
11640 {
11641 uint32_t u32Value;
11642 rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
11643 if (rcStrict == VINF_SUCCESS)
11644 rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
11645 break;
11646 }
11647
11648 case IEMMODE_64BIT:
11649 {
11650 uint64_t u64Value;
11651 rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
11652 if (rcStrict == VINF_SUCCESS)
11653 rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
11654 break;
11655 }
11656
11657 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11658 }
11659 if (rcStrict == VINF_SUCCESS)
11660 {
11661 pCtx->rsp = TmpRsp.u;
11662 iemRegUpdateRipAndClearRF(pVCpu);
11663 }
11664 return rcStrict;
11665
11666#else
11667 return VERR_IEM_IPE_2;
11668#endif
11669}
11670
11671
11672/** Opcode 0x8f. */
11673FNIEMOP_DEF(iemOp_Grp1A)
11674{
11675 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11676 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
11677 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
11678
11679 /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
11680 /** @todo XOP decoding. */
11681 IEMOP_MNEMONIC(xop_amd, "3-byte-xop");
11682 return IEMOP_RAISE_INVALID_OPCODE();
11683}
11684
11685
11686/**
11687 * Common 'xchg reg,rAX' helper.
11688 */
11689FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
11690{
11691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11692
11693 iReg |= pVCpu->iem.s.uRexB;
11694 switch (pVCpu->iem.s.enmEffOpSize)
11695 {
11696 case IEMMODE_16BIT:
11697 IEM_MC_BEGIN(0, 2);
11698 IEM_MC_LOCAL(uint16_t, u16Tmp1);
11699 IEM_MC_LOCAL(uint16_t, u16Tmp2);
11700 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
11701 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
11702 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
11703 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
11704 IEM_MC_ADVANCE_RIP();
11705 IEM_MC_END();
11706 return VINF_SUCCESS;
11707
11708 case IEMMODE_32BIT:
11709 IEM_MC_BEGIN(0, 2);
11710 IEM_MC_LOCAL(uint32_t, u32Tmp1);
11711 IEM_MC_LOCAL(uint32_t, u32Tmp2);
11712 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
11713 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
11714 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
11715 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
11716 IEM_MC_ADVANCE_RIP();
11717 IEM_MC_END();
11718 return VINF_SUCCESS;
11719
11720 case IEMMODE_64BIT:
11721 IEM_MC_BEGIN(0, 2);
11722 IEM_MC_LOCAL(uint64_t, u64Tmp1);
11723 IEM_MC_LOCAL(uint64_t, u64Tmp2);
11724 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
11725 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
11726 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
11727 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
11728 IEM_MC_ADVANCE_RIP();
11729 IEM_MC_END();
11730 return VINF_SUCCESS;
11731
11732 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11733 }
11734}
11735
11736
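/*
 * Opcode 0x90 has three personalities: plain 90 is NOP, F3 90 is PAUSE,
 * and with REX.B (41 90) it is a real 'xchg r8, rAX' rather than a NOP
 * since the REX prefix makes the operand distinct from rAX.
 */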
11737/** Opcode 0x90. */
11738FNIEMOP_DEF(iemOp_nop)
11739{
11740 /* R8/R8D and RAX/EAX can be exchanged. */
11741 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
11742 {
11743 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
11744 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
11745 }
11746
11747 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ) /* pause is encoded as F3 90 (repz nop) */
11748 IEMOP_MNEMONIC(pause, "pause");
11749 else
11750 IEMOP_MNEMONIC(nop, "nop");
11751 IEM_MC_BEGIN(0, 0);
11752 IEM_MC_ADVANCE_RIP();
11753 IEM_MC_END();
11754 return VINF_SUCCESS;
11755}
11756
11757
11758/** Opcode 0x91. */
11759FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
11760{
11761 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
11762 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
11763}
11764
11765
11766/** Opcode 0x92. */
11767FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
11768{
11769 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
11770 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
11771}
11772
11773
11774/** Opcode 0x93. */
11775FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
11776{
11777 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
11778 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
11779}
11780
11781
11782/** Opcode 0x94. */
11783FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
11784{
11785 IEMOP_MNEMONIC(xchg_rSP_rAX, "xchg rSP,rAX");
11786 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
11787}
11788
11789
11790/** Opcode 0x95. */
11791FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
11792{
11793 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
11794 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
11795}
11796
11797
11798/** Opcode 0x96. */
11799FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
11800{
11801 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
11802 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
11803}
11804
11805
11806/** Opcode 0x97. */
11807FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
11808{
11809 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
11810 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
11811}
11812
11813
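/*
 * Opcodes 0x98 and 0x99 are the sign extension family: 0x98 widens within
 * rAX (cbw/cwde/cdqe) while 0x99 widens into rDX:rAX (cwd/cdq/cqo).  E.g.
 * with AL=0x80, cbw yields AX=0xff80; the bit test plus OR/AND pattern
 * below implements exactly that.
 */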
11814/** Opcode 0x98. */
11815FNIEMOP_DEF(iemOp_cbw)
11816{
11817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11818 switch (pVCpu->iem.s.enmEffOpSize)
11819 {
11820 case IEMMODE_16BIT:
11821 IEMOP_MNEMONIC(cbw, "cbw");
11822 IEM_MC_BEGIN(0, 1);
11823 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
11824 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
11825 } IEM_MC_ELSE() {
11826 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
11827 } IEM_MC_ENDIF();
11828 IEM_MC_ADVANCE_RIP();
11829 IEM_MC_END();
11830 return VINF_SUCCESS;
11831
11832 case IEMMODE_32BIT:
11833 IEMOP_MNEMONIC(cwde, "cwde");
11834 IEM_MC_BEGIN(0, 1);
11835 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
11836 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
11837 } IEM_MC_ELSE() {
11838 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
11839 } IEM_MC_ENDIF();
11840 IEM_MC_ADVANCE_RIP();
11841 IEM_MC_END();
11842 return VINF_SUCCESS;
11843
11844 case IEMMODE_64BIT:
11845 IEMOP_MNEMONIC(cdqe, "cdqe");
11846 IEM_MC_BEGIN(0, 1);
11847 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
11848 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
11849 } IEM_MC_ELSE() {
11850 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
11851 } IEM_MC_ENDIF();
11852 IEM_MC_ADVANCE_RIP();
11853 IEM_MC_END();
11854 return VINF_SUCCESS;
11855
11856 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11857 }
11858}
11859
11860
11861/** Opcode 0x99. */
11862FNIEMOP_DEF(iemOp_cwd)
11863{
11864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11865 switch (pVCpu->iem.s.enmEffOpSize)
11866 {
11867 case IEMMODE_16BIT:
11868 IEMOP_MNEMONIC(cwd, "cwd");
11869 IEM_MC_BEGIN(0, 1);
11870 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
11871 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
11872 } IEM_MC_ELSE() {
11873 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
11874 } IEM_MC_ENDIF();
11875 IEM_MC_ADVANCE_RIP();
11876 IEM_MC_END();
11877 return VINF_SUCCESS;
11878
11879 case IEMMODE_32BIT:
11880 IEMOP_MNEMONIC(cdq, "cdq");
11881 IEM_MC_BEGIN(0, 1);
11882 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
11883 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
11884 } IEM_MC_ELSE() {
11885 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
11886 } IEM_MC_ENDIF();
11887 IEM_MC_ADVANCE_RIP();
11888 IEM_MC_END();
11889 return VINF_SUCCESS;
11890
11891 case IEMMODE_64BIT:
11892 IEMOP_MNEMONIC(cqo, "cqo");
11893 IEM_MC_BEGIN(0, 1);
11894 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
11895 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
11896 } IEM_MC_ELSE() {
11897 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
11898 } IEM_MC_ENDIF();
11899 IEM_MC_ADVANCE_RIP();
11900 IEM_MC_END();
11901 return VINF_SUCCESS;
11902
11903 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11904 }
11905}
11906
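/*
 * Companion sketch (not IEM code) for CWD/CDQ/CQO above: the sign bit of
 * AX/EAX/RAX is replicated into every bit of DX/EDX/RDX.  An arithmetic
 * right shift on signed types is assumed, as mainstream compilers provide.
 */
#if 0 /* illustrative only */
# include <stdint.h>

static uint16_t SketchCwd(uint16_t uAx)  { return (uint16_t)((int16_t)uAx  >> 15); }
static uint32_t SketchCdq(uint32_t uEax) { return (uint32_t)((int32_t)uEax >> 31); }
static uint64_t SketchCqo(uint64_t uRax) { return (uint64_t)((int64_t)uRax >> 63); }
#endif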
11907
11908/** Opcode 0x9a. */
11909FNIEMOP_DEF(iemOp_call_Ap)
11910{
11911 IEMOP_MNEMONIC(call_Ap, "call Ap");
11912 IEMOP_HLP_NO_64BIT();
11913
11914 /* Decode the far pointer address and pass it on to the far call C implementation. */
11915 uint32_t offSeg;
11916 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
11917 IEM_OPCODE_GET_NEXT_U32(&offSeg);
11918 else
11919 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
11920 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
11921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11922 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
11923}
11924
11925
11926/** Opcode 0x9b. (aka fwait) */
11927FNIEMOP_DEF(iemOp_wait)
11928{
11929 IEMOP_MNEMONIC(wait, "wait");
11930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11931
11932 IEM_MC_BEGIN(0, 0);
11933 IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
11934 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11935 IEM_MC_ADVANCE_RIP();
11936 IEM_MC_END();
11937 return VINF_SUCCESS;
11938}
11939
11940
11941/** Opcode 0x9c. */
11942FNIEMOP_DEF(iemOp_pushf_Fv)
11943{
11944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11945 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11946 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
11947}
11948
11949
11950/** Opcode 0x9d. */
11951FNIEMOP_DEF(iemOp_popf_Fv)
11952{
11953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11954 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11955 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
11956}
11957
11958
11959/** Opcode 0x9e. */
11960FNIEMOP_DEF(iemOp_sahf)
11961{
11962 IEMOP_MNEMONIC(sahf, "sahf");
11963 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11964 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
11965 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
11966 return IEMOP_RAISE_INVALID_OPCODE();
11967 IEM_MC_BEGIN(0, 2);
11968 IEM_MC_LOCAL(uint32_t, u32Flags);
11969 IEM_MC_LOCAL(uint32_t, EFlags);
11970 IEM_MC_FETCH_EFLAGS(EFlags);
11971 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
11972 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11973 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
11974 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
11975 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
11976 IEM_MC_COMMIT_EFLAGS(EFlags);
11977 IEM_MC_ADVANCE_RIP();
11978 IEM_MC_END();
11979 return VINF_SUCCESS;
11980}
11981
11982
11983/** Opcode 0x9f. */
11984FNIEMOP_DEF(iemOp_lahf)
11985{
11986 IEMOP_MNEMONIC(lahf, "lahf");
11987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11988 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
11989 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
11990 return IEMOP_RAISE_INVALID_OPCODE();
11991 IEM_MC_BEGIN(0, 1);
11992 IEM_MC_LOCAL(uint8_t, u8Flags);
11993 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
11994 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
11995 IEM_MC_ADVANCE_RIP();
11996 IEM_MC_END();
11997 return VINF_SUCCESS;
11998}
11999
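/*
 * Standalone sketch (not IEM code) of the flag shuffling done by SAHF and
 * LAHF above: only SF, ZF, AF, PF and CF travel through AH, and bit 1 of
 * EFLAGS always reads as one.  The mask literals mirror the X86_EFL_*
 * values used above.
 */
#if 0 /* illustrative only */
# include <stdint.h>

# define SKETCH_EFL_LIVE_MASK UINT32_C(0x000000d5) /* SF|ZF|AF|PF|CF */
# define SKETCH_EFL_1         UINT32_C(0x00000002) /* fixed one bit  */

static uint8_t SketchLahf(uint32_t fEFlags)               /* AH <- flags */
{
    return (uint8_t)((fEFlags & SKETCH_EFL_LIVE_MASK) | SKETCH_EFL_1);
}

static uint32_t SketchSahf(uint32_t fEFlags, uint8_t uAh) /* flags <- AH */
{
    return (fEFlags & UINT32_C(0xffffff00))
         | (uAh & SKETCH_EFL_LIVE_MASK)
         | SKETCH_EFL_1;
}
#endif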
12000
12001/**
12002 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
12003 * iemOp_mov_Ov_rAX to fetch the moffsXX part of the opcode and fend off lock
12004 * prefixes. Returns on failure.
12005 * @param a_GCPtrMemOff The variable to store the offset in.
12006 */
12007#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
12008 do \
12009 { \
12010 switch (pVCpu->iem.s.enmEffAddrMode) \
12011 { \
12012 case IEMMODE_16BIT: \
12013 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
12014 break; \
12015 case IEMMODE_32BIT: \
12016 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
12017 break; \
12018 case IEMMODE_64BIT: \
12019 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
12020 break; \
12021 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
12022 } \
12023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12024 } while (0)
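
/*
 * Standalone sketch (not IEM code) of the point the macro encodes: the
 * moffs immediate of opcodes 0xa0..0xa3 is as wide as the effective
 * *address* size, not the operand size.  A little-endian host and a flat
 * instruction byte stream are assumed.
 */
#if 0 /* illustrative only */
# include <stdint.h>
# include <string.h>

static uint64_t SketchReadMoffs(const uint8_t *pbInstr, unsigned cbAddr /* 2, 4 or 8 */)
{
    uint64_t uOff = 0;
    memcpy(&uOff, pbInstr, cbAddr); /* zero-extends to 64 bits */
    return uOff;
}
#endif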
12025
12026/** Opcode 0xa0. */
12027FNIEMOP_DEF(iemOp_mov_Al_Ob)
12028{
12029 /*
12030 * Get the offset and fend off lock prefixes.
12031 */
12032 RTGCPTR GCPtrMemOff;
12033 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12034
12035 /*
12036 * Fetch AL.
12037 */
12038 IEM_MC_BEGIN(0,1);
12039 IEM_MC_LOCAL(uint8_t, u8Tmp);
12040 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12041 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
12042 IEM_MC_ADVANCE_RIP();
12043 IEM_MC_END();
12044 return VINF_SUCCESS;
12045}
12046
12047
12048/** Opcode 0xa1. */
12049FNIEMOP_DEF(iemOp_mov_rAX_Ov)
12050{
12051 /*
12052 * Get the offset and fend off lock prefixes.
12053 */
12054 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
12055 RTGCPTR GCPtrMemOff;
12056 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12057
12058 /*
12059 * Fetch rAX.
12060 */
12061 switch (pVCpu->iem.s.enmEffOpSize)
12062 {
12063 case IEMMODE_16BIT:
12064 IEM_MC_BEGIN(0,1);
12065 IEM_MC_LOCAL(uint16_t, u16Tmp);
12066 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12067 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
12068 IEM_MC_ADVANCE_RIP();
12069 IEM_MC_END();
12070 return VINF_SUCCESS;
12071
12072 case IEMMODE_32BIT:
12073 IEM_MC_BEGIN(0,1);
12074 IEM_MC_LOCAL(uint32_t, u32Tmp);
12075 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12076 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
12077 IEM_MC_ADVANCE_RIP();
12078 IEM_MC_END();
12079 return VINF_SUCCESS;
12080
12081 case IEMMODE_64BIT:
12082 IEM_MC_BEGIN(0,1);
12083 IEM_MC_LOCAL(uint64_t, u64Tmp);
12084 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12085 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
12086 IEM_MC_ADVANCE_RIP();
12087 IEM_MC_END();
12088 return VINF_SUCCESS;
12089
12090 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12091 }
12092}
12093
12094
12095/** Opcode 0xa2. */
12096FNIEMOP_DEF(iemOp_mov_Ob_AL)
12097{
12098 /*
12099 * Get the offset and fend off lock prefixes.
12100 */
12101 RTGCPTR GCPtrMemOff;
12102 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12103
12104 /*
12105 * Store AL.
12106 */
12107 IEM_MC_BEGIN(0,1);
12108 IEM_MC_LOCAL(uint8_t, u8Tmp);
12109 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
12110 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
12111 IEM_MC_ADVANCE_RIP();
12112 IEM_MC_END();
12113 return VINF_SUCCESS;
12114}
12115
12116
12117/** Opcode 0xa3. */
12118FNIEMOP_DEF(iemOp_mov_Ov_rAX)
12119{
12120 /*
12121 * Get the offset and fend off lock prefixes.
12122 */
12123 RTGCPTR GCPtrMemOff;
12124 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12125
12126 /*
12127 * Store rAX.
12128 */
12129 switch (pVCpu->iem.s.enmEffOpSize)
12130 {
12131 case IEMMODE_16BIT:
12132 IEM_MC_BEGIN(0,1);
12133 IEM_MC_LOCAL(uint16_t, u16Tmp);
12134 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
12135 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
12136 IEM_MC_ADVANCE_RIP();
12137 IEM_MC_END();
12138 return VINF_SUCCESS;
12139
12140 case IEMMODE_32BIT:
12141 IEM_MC_BEGIN(0,1);
12142 IEM_MC_LOCAL(uint32_t, u32Tmp);
12143 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
12144 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
12145 IEM_MC_ADVANCE_RIP();
12146 IEM_MC_END();
12147 return VINF_SUCCESS;
12148
12149 case IEMMODE_64BIT:
12150 IEM_MC_BEGIN(0,1);
12151 IEM_MC_LOCAL(uint64_t, u64Tmp);
12152 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
12153 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
12154 IEM_MC_ADVANCE_RIP();
12155 IEM_MC_END();
12156 return VINF_SUCCESS;
12157
12158 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12159 }
12160}
12161
12162/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv */
12163#define IEM_MOVS_CASE(ValBits, AddrBits) \
12164 IEM_MC_BEGIN(0, 2); \
12165 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
12166 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12167 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
12168 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
12169 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12170 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
12171 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12172 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12173 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12174 } IEM_MC_ELSE() { \
12175 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12176 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12177 } IEM_MC_ENDIF(); \
12178 IEM_MC_ADVANCE_RIP(); \
12179 IEM_MC_END();
12180
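/*
 * Standalone sketch (not IEM code) of one MOVS iteration as expanded by the
 * macro above: copy an element from DS:rSI to ES:rDI, then step both index
 * registers by the element size, downwards when EFLAGS.DF is set and
 * upwards otherwise.  Flat memory and 64-bit addressing are assumed.
 */
#if 0 /* illustrative only */
# include <stdint.h>
# include <string.h>

static void SketchMovsStep(uint8_t *pbMem, uint64_t *puSi, uint64_t *puDi,
                           unsigned cbElem, int fDf)
{
    memcpy(pbMem + *puDi, pbMem + *puSi, cbElem);
    if (fDf)
    {
        *puSi -= cbElem;
        *puDi -= cbElem;
    }
    else
    {
        *puSi += cbElem;
        *puDi += cbElem;
    }
}
#endif
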
12181/** Opcode 0xa4. */
12182FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
12183{
12184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12185
12186 /*
12187 * Use the C implementation if a repeat prefix is encountered.
12188 */
12189 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12190 {
12191 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
12192 switch (pVCpu->iem.s.enmEffAddrMode)
12193 {
12194 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
12195 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
12196 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
12197 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12198 }
12199 }
12200 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
12201
12202 /*
12203 * Sharing case implementation with movs[wdq] below.
12204 */
12205 switch (pVCpu->iem.s.enmEffAddrMode)
12206 {
12207 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
12208 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
12209 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
12210 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12211 }
12212 return VINF_SUCCESS;
12213}
12214
12215
12216/** Opcode 0xa5. */
12217FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
12218{
12219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12220
12221 /*
12222 * Use the C implementation if a repeat prefix is encountered.
12223 */
12224 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12225 {
12226 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
12227 switch (pVCpu->iem.s.enmEffOpSize)
12228 {
12229 case IEMMODE_16BIT:
12230 switch (pVCpu->iem.s.enmEffAddrMode)
12231 {
12232 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
12233 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
12234 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
12235 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12236 }
12237 break;
12238 case IEMMODE_32BIT:
12239 switch (pVCpu->iem.s.enmEffAddrMode)
12240 {
12241 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
12242 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
12243 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
12244 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12245 }
12246 case IEMMODE_64BIT:
12247 switch (pVCpu->iem.s.enmEffAddrMode)
12248 {
12249 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
12250 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
12251 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
12252 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12253 }
12254 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12255 }
12256 }
12257 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
12258
12259 /*
12260 * Annoying double switch here.
12261 * Using ugly macro for implementing the cases, sharing it with movsb.
12262 */
12263 switch (pVCpu->iem.s.enmEffOpSize)
12264 {
12265 case IEMMODE_16BIT:
12266 switch (pVCpu->iem.s.enmEffAddrMode)
12267 {
12268 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
12269 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
12270 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
12271 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12272 }
12273 break;
12274
12275 case IEMMODE_32BIT:
12276 switch (pVCpu->iem.s.enmEffAddrMode)
12277 {
12278 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
12279 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
12280 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
12281 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12282 }
12283 break;
12284
12285 case IEMMODE_64BIT:
12286 switch (pVCpu->iem.s.enmEffAddrMode)
12287 {
12288 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12289 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
12290 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
12291 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12292 }
12293 break;
12294 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12295 }
12296 return VINF_SUCCESS;
12297}
12298
12299#undef IEM_MOVS_CASE
12300
12301/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv */
12302#define IEM_CMPS_CASE(ValBits, AddrBits) \
12303 IEM_MC_BEGIN(3, 3); \
12304 IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
12305 IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
12306 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
12307 IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
12308 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12309 \
12310 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
12311 IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
12312 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12313 IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
12314 IEM_MC_REF_LOCAL(puValue1, uValue1); \
12315 IEM_MC_REF_EFLAGS(pEFlags); \
12316 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
12317 \
12318 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12319 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12320 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12321 } IEM_MC_ELSE() { \
12322 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12323 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12324 } IEM_MC_ENDIF(); \
12325 IEM_MC_ADVANCE_RIP(); \
12326 IEM_MC_END(); \
12327
12328/** Opcode 0xa6. */
12329FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
12330{
12331 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12332
12333 /*
12334 * Use the C implementation if a repeat prefix is encountered.
12335 */
12336 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12337 {
12338 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
12339 switch (pVCpu->iem.s.enmEffAddrMode)
12340 {
12341 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
12342 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
12343 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
12344 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12345 }
12346 }
12347 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12348 {
12349 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
12350 switch (pVCpu->iem.s.enmEffAddrMode)
12351 {
12352 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
12353 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
12354 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
12355 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12356 }
12357 }
12358 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
12359
12360 /*
12361 * Sharing case implementation with cmps[wdq] below.
12362 */
12363 switch (pVCpu->iem.s.enmEffAddrMode)
12364 {
12365 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
12366 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
12367 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
12368 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12369 }
12370 return VINF_SUCCESS;
12371
12372}
12373
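/*
 * Standalone sketch (not IEM code) of the REPE CMPSB loop handed off to the
 * C implementations above: repeat while rCX is non-zero and the last
 * comparison set ZF; REPNE loops while ZF is clear instead.  Flat memory,
 * DF clear and only the ZF outcome are modelled.
 */
#if 0 /* illustrative only */
# include <stdint.h>

static void SketchRepeCmpsb(const uint8_t *pbMem, uint64_t *puSi, uint64_t *puDi,
                            uint64_t *puCx, int *pfZf)
{
    while (*puCx != 0)
    {
        *pfZf = pbMem[*puSi] == pbMem[*puDi];
        *puSi += 1;
        *puDi += 1;
        *puCx -= 1;
        if (!*pfZf)
            break;
    }
}
#endif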
12374
12375/** Opcode 0xa7. */
12376FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
12377{
12378 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12379
12380 /*
12381 * Use the C implementation if a repeat prefix is encountered.
12382 */
12383 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12384 {
12385 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
12386 switch (pVCpu->iem.s.enmEffOpSize)
12387 {
12388 case IEMMODE_16BIT:
12389 switch (pVCpu->iem.s.enmEffAddrMode)
12390 {
12391 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
12392 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
12393 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
12394 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12395 }
12396 break;
12397 case IEMMODE_32BIT:
12398 switch (pVCpu->iem.s.enmEffAddrMode)
12399 {
12400 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
12401 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
12402 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
12403 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12404 }
12405 case IEMMODE_64BIT:
12406 switch (pVCpu->iem.s.enmEffAddrMode)
12407 {
12408 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
12409 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
12410 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
12411 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12412 }
12413 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12414 }
12415 }
12416
12417 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12418 {
12419 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
12420 switch (pVCpu->iem.s.enmEffOpSize)
12421 {
12422 case IEMMODE_16BIT:
12423 switch (pVCpu->iem.s.enmEffAddrMode)
12424 {
12425 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
12426 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
12427 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
12428 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12429 }
12430 break;
12431 case IEMMODE_32BIT:
12432 switch (pVCpu->iem.s.enmEffAddrMode)
12433 {
12434 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
12435 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
12436 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
12437 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12438 }
12439 case IEMMODE_64BIT:
12440 switch (pVCpu->iem.s.enmEffAddrMode)
12441 {
12442 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
12443 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
12444 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
12445 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12446 }
12447 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12448 }
12449 }
12450
12451 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
12452
12453 /*
12454 * Annoying double switch here.
12455 * Using ugly macro for implementing the cases, sharing it with cmpsb.
12456 */
12457 switch (pVCpu->iem.s.enmEffOpSize)
12458 {
12459 case IEMMODE_16BIT:
12460 switch (pVCpu->iem.s.enmEffAddrMode)
12461 {
12462 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
12463 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
12464 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
12465 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12466 }
12467 break;
12468
12469 case IEMMODE_32BIT:
12470 switch (pVCpu->iem.s.enmEffAddrMode)
12471 {
12472 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
12473 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
12474 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
12475 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12476 }
12477 break;
12478
12479 case IEMMODE_64BIT:
12480 switch (pVCpu->iem.s.enmEffAddrMode)
12481 {
12482 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12483 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
12484 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
12485 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12486 }
12487 break;
12488 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12489 }
12490 return VINF_SUCCESS;
12491
12492}
12493
12494#undef IEM_CMPS_CASE
12495
12496/** Opcode 0xa8. */
12497FNIEMOP_DEF(iemOp_test_AL_Ib)
12498{
12499 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
12500 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
12501 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
12502}
12503
12504
12505/** Opcode 0xa9. */
12506FNIEMOP_DEF(iemOp_test_eAX_Iz)
12507{
12508 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
12509 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
12510 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
12511}
12512
12513
12514/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX */
12515#define IEM_STOS_CASE(ValBits, AddrBits) \
12516 IEM_MC_BEGIN(0, 2); \
12517 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
12518 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12519 IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
12520 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12521 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
12522 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12523 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12524 } IEM_MC_ELSE() { \
12525 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12526 } IEM_MC_ENDIF(); \
12527 IEM_MC_ADVANCE_RIP(); \
12528 IEM_MC_END(); \
12529
12530/** Opcode 0xaa. */
12531FNIEMOP_DEF(iemOp_stosb_Yb_AL)
12532{
12533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12534
12535 /*
12536 * Use the C implementation if a repeat prefix is encountered.
12537 */
12538 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12539 {
12540 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
12541 switch (pVCpu->iem.s.enmEffAddrMode)
12542 {
12543 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
12544 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
12545 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
12546 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12547 }
12548 }
12549 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
12550
12551 /*
12552 * Sharing case implementation with stos[wdq] below.
12553 */
12554 switch (pVCpu->iem.s.enmEffAddrMode)
12555 {
12556 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
12557 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
12558 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
12559 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12560 }
12561 return VINF_SUCCESS;
12562}
12563
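/*
 * Standalone sketch (not IEM code) of why the REP STOSB fallback above can
 * be serviced like a memset when DF is clear: store AL at ES:rDI, advance
 * rDI and count rCX down to zero.
 */
#if 0 /* illustrative only */
# include <stdint.h>
# include <string.h>

static void SketchRepStosb(uint8_t *pbMem, uint64_t *puDi, uint64_t *puCx, uint8_t uAl)
{
    memset(pbMem + *puDi, uAl, (size_t)*puCx);
    *puDi += *puCx;
    *puCx  = 0;
}
#endif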
12564
12565/** Opcode 0xab. */
12566FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
12567{
12568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12569
12570 /*
12571 * Use the C implementation if a repeat prefix is encountered.
12572 */
12573 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12574 {
12575 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
12576 switch (pVCpu->iem.s.enmEffOpSize)
12577 {
12578 case IEMMODE_16BIT:
12579 switch (pVCpu->iem.s.enmEffAddrMode)
12580 {
12581 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
12582 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
12583 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
12584 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12585 }
12586 break;
12587 case IEMMODE_32BIT:
12588 switch (pVCpu->iem.s.enmEffAddrMode)
12589 {
12590 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
12591 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
12592 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
12593 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12594 }
12595 case IEMMODE_64BIT:
12596 switch (pVCpu->iem.s.enmEffAddrMode)
12597 {
12598 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
12599 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
12600 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
12601 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12602 }
12603 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12604 }
12605 }
12606 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
12607
12608 /*
12609 * Annoying double switch here.
12610 * Using ugly macro for implementing the cases, sharing it with stosb.
12611 */
12612 switch (pVCpu->iem.s.enmEffOpSize)
12613 {
12614 case IEMMODE_16BIT:
12615 switch (pVCpu->iem.s.enmEffAddrMode)
12616 {
12617 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
12618 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
12619 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
12620 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12621 }
12622 break;
12623
12624 case IEMMODE_32BIT:
12625 switch (pVCpu->iem.s.enmEffAddrMode)
12626 {
12627 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
12628 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
12629 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
12630 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12631 }
12632 break;
12633
12634 case IEMMODE_64BIT:
12635 switch (pVCpu->iem.s.enmEffAddrMode)
12636 {
12637 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12638 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
12639 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
12640 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12641 }
12642 break;
12643 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12644 }
12645 return VINF_SUCCESS;
12646}
12647
12648#undef IEM_STOS_CASE
12649
12650/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv */
12651#define IEM_LODS_CASE(ValBits, AddrBits) \
12652 IEM_MC_BEGIN(0, 2); \
12653 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
12654 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12655 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
12656 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
12657 IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
12658 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12659 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12660 } IEM_MC_ELSE() { \
12661 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12662 } IEM_MC_ENDIF(); \
12663 IEM_MC_ADVANCE_RIP(); \
12664 IEM_MC_END();
12665
12666/** Opcode 0xac. */
12667FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
12668{
12669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12670
12671 /*
12672 * Use the C implementation if a repeat prefix is encountered.
12673 */
12674 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12675 {
12676 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
12677 switch (pVCpu->iem.s.enmEffAddrMode)
12678 {
12679 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
12680 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
12681 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
12682 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12683 }
12684 }
12685 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
12686
12687 /*
12688 * Sharing case implementation with lods[wdq] below.
12689 */
12690 switch (pVCpu->iem.s.enmEffAddrMode)
12691 {
12692 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
12693 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
12694 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
12695 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12696 }
12697 return VINF_SUCCESS;
12698}
12699
12700
12701/** Opcode 0xad. */
12702FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
12703{
12704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12705
12706 /*
12707 * Use the C implementation if a repeat prefix is encountered.
12708 */
12709 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12710 {
12711 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
12712 switch (pVCpu->iem.s.enmEffOpSize)
12713 {
12714 case IEMMODE_16BIT:
12715 switch (pVCpu->iem.s.enmEffAddrMode)
12716 {
12717 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
12718 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
12719 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
12720 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12721 }
12722 break;
12723 case IEMMODE_32BIT:
12724 switch (pVCpu->iem.s.enmEffAddrMode)
12725 {
12726 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
12727 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
12728 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
12729 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12730 }
12731 case IEMMODE_64BIT:
12732 switch (pVCpu->iem.s.enmEffAddrMode)
12733 {
12734 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
12735 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
12736 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
12737 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12738 }
12739 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12740 }
12741 }
12742 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
12743
12744 /*
12745 * Annoying double switch here.
12746 * Using ugly macro for implementing the cases, sharing it with lodsb.
12747 */
12748 switch (pVCpu->iem.s.enmEffOpSize)
12749 {
12750 case IEMMODE_16BIT:
12751 switch (pVCpu->iem.s.enmEffAddrMode)
12752 {
12753 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
12754 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
12755 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
12756 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12757 }
12758 break;
12759
12760 case IEMMODE_32BIT:
12761 switch (pVCpu->iem.s.enmEffAddrMode)
12762 {
12763 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
12764 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
12765 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
12766 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12767 }
12768 break;
12769
12770 case IEMMODE_64BIT:
12771 switch (pVCpu->iem.s.enmEffAddrMode)
12772 {
12773 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12774 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
12775 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
12776 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12777 }
12778 break;
12779 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12780 }
12781 return VINF_SUCCESS;
12782}
12783
12784#undef IEM_LODS_CASE
12785
12786/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv */
12787#define IEM_SCAS_CASE(ValBits, AddrBits) \
12788 IEM_MC_BEGIN(3, 2); \
12789 IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
12790 IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
12791 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
12792 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12793 \
12794 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12795 IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
12796 IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
12797 IEM_MC_REF_EFLAGS(pEFlags); \
12798 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
12799 \
12800 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12801 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12802 } IEM_MC_ELSE() { \
12803 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12804 } IEM_MC_ENDIF(); \
12805 IEM_MC_ADVANCE_RIP(); \
12806 IEM_MC_END();
12807
12808/** Opcode 0xae. */
12809FNIEMOP_DEF(iemOp_scasb_AL_Xb)
12810{
12811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12812
12813 /*
12814 * Use the C implementation if a repeat prefix is encountered.
12815 */
12816 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12817 {
12818 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
12819 switch (pVCpu->iem.s.enmEffAddrMode)
12820 {
12821 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
12822 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
12823 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
12824 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12825 }
12826 }
12827 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12828 {
12829 IEMOP_MNEMONIC(repne_scasb_AL_Xb, "repne scasb AL,Xb");
12830 switch (pVCpu->iem.s.enmEffAddrMode)
12831 {
12832 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
12833 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
12834 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
12835 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12836 }
12837 }
12838 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
12839
12840 /*
12841 * Sharing case implementation with scas[wdq] below.
12842 */
12843 switch (pVCpu->iem.s.enmEffAddrMode)
12844 {
12845 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
12846 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
12847 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
12848 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12849 }
12850 return VINF_SUCCESS;
12851}
12852
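/*
 * Standalone sketch (not IEM code) of REPNE SCASB, the classic memchr idiom
 * the deferrals above implement: compare AL against ES:rDI, advancing rDI
 * and decrementing rCX until the bytes match (ZF set) or rCX reaches zero.
 * Flat memory and DF clear are assumed.
 */
#if 0 /* illustrative only */
# include <stdint.h>

static void SketchRepneScasb(const uint8_t *pbMem, uint64_t *puDi, uint64_t *puCx,
                             uint8_t uAl, int *pfZf)
{
    *pfZf = 0;
    while (*puCx != 0)
    {
        *pfZf = pbMem[*puDi] == uAl;
        *puDi += 1;
        *puCx -= 1;
        if (*pfZf)
            break;
    }
}
#endif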
12853
12854/** Opcode 0xaf. */
12855FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
12856{
12857 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12858
12859 /*
12860 * Use the C implementation if a repeat prefix is encountered.
12861 */
12862 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12863 {
12864 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
12865 switch (pVCpu->iem.s.enmEffOpSize)
12866 {
12867 case IEMMODE_16BIT:
12868 switch (pVCpu->iem.s.enmEffAddrMode)
12869 {
12870 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
12871 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
12872 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
12873 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12874 }
12875 break;
12876 case IEMMODE_32BIT:
12877 switch (pVCpu->iem.s.enmEffAddrMode)
12878 {
12879 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
12880 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
12881 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
12882 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12883 }
12884 case IEMMODE_64BIT:
12885 switch (pVCpu->iem.s.enmEffAddrMode)
12886 {
12887 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Verify: 64-bit mode allows 32-bit addressing but not 16-bit, so this assertion should be correct. */
12888 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
12889 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
12890 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12891 }
12892 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12893 }
12894 }
12895 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12896 {
12897 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
12898 switch (pVCpu->iem.s.enmEffOpSize)
12899 {
12900 case IEMMODE_16BIT:
12901 switch (pVCpu->iem.s.enmEffAddrMode)
12902 {
12903 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
12904 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
12905 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
12906 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12907 }
12908 break;
12909 case IEMMODE_32BIT:
12910 switch (pVCpu->iem.s.enmEffAddrMode)
12911 {
12912 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
12913 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
12914 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
12915 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12916 }
12917 case IEMMODE_64BIT:
12918 switch (pVCpu->iem.s.enmEffAddrMode)
12919 {
12920 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
12921 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
12922 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
12923 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12924 }
12925 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12926 }
12927 }
12928 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
12929
12930 /*
12931 * Annoying double switch here.
12932 * Using ugly macro for implementing the cases, sharing it with scasb.
12933 */
12934 switch (pVCpu->iem.s.enmEffOpSize)
12935 {
12936 case IEMMODE_16BIT:
12937 switch (pVCpu->iem.s.enmEffAddrMode)
12938 {
12939 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
12940 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
12941 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
12942 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12943 }
12944 break;
12945
12946 case IEMMODE_32BIT:
12947 switch (pVCpu->iem.s.enmEffAddrMode)
12948 {
12949 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
12950 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
12951 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
12952 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12953 }
12954 break;
12955
12956 case IEMMODE_64BIT:
12957 switch (pVCpu->iem.s.enmEffAddrMode)
12958 {
12959 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12960 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
12961 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
12962 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12963 }
12964 break;
12965 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12966 }
12967 return VINF_SUCCESS;
12968}
12969
12970#undef IEM_SCAS_CASE
12971
12972/**
12973 * Common 'mov r8, imm8' helper.
12974 */
12975FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
12976{
12977 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12978 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12979
12980 IEM_MC_BEGIN(0, 1);
12981 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
12982 IEM_MC_STORE_GREG_U8(iReg, u8Value);
12983 IEM_MC_ADVANCE_RIP();
12984 IEM_MC_END();
12985
12986 return VINF_SUCCESS;
12987}
12988
12989
12990/** Opcode 0xb0. */
12991FNIEMOP_DEF(iemOp_mov_AL_Ib)
12992{
12993 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
12994 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12995}
12996
12997
12998/** Opcode 0xb1. */
12999FNIEMOP_DEF(iemOp_CL_Ib)
13000{
13001 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
13002 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
13003}
13004
13005
13006/** Opcode 0xb2. */
13007FNIEMOP_DEF(iemOp_DL_Ib)
13008{
13009 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
13010 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
13011}
13012
13013
13014/** Opcode 0xb3. */
13015FNIEMOP_DEF(iemOp_BL_Ib)
13016{
13017 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
13018 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
13019}
13020
13021
13022/** Opcode 0xb4. */
13023FNIEMOP_DEF(iemOp_mov_AH_Ib)
13024{
13025 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
13026 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
13027}
13028
13029
13030/** Opcode 0xb5. */
13031FNIEMOP_DEF(iemOp_CH_Ib)
13032{
13033 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
13034 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
13035}
13036
13037
13038/** Opcode 0xb6. */
13039FNIEMOP_DEF(iemOp_DH_Ib)
13040{
13041 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
13042 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
13043}
13044
13045
13046/** Opcode 0xb7. */
13047FNIEMOP_DEF(iemOp_BH_Ib)
13048{
13049 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
13050 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
13051}
13052
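/*
 * Why "mov AH,Ib" and friends above pass the xSP..xDI indices: the 8-bit
 * register encodings 4..7 select AH/CH/DH/BH without a REX prefix but
 * SPL/BPL/SIL/DIL once any REX prefix is present, and IEM folds that
 * distinction into its 8-bit GREG accessors.  A standalone naming sketch
 * (not IEM code):
 */
#if 0 /* illustrative only */
static const char *SketchGreg8Name(unsigned iReg /* 0..15, REX.B applied */, int fHasRex)
{
    static const char * const s_apszLegacy[8] =
    { "al", "cl", "dl", "bl", "ah", "ch", "dh", "bh" };
    static const char * const s_apszRex[16] =
    { "al",  "cl",  "dl",   "bl",   "spl",  "bpl",  "sil",  "dil",
      "r8b", "r9b", "r10b", "r11b", "r12b", "r13b", "r14b", "r15b" };
    return fHasRex ? s_apszRex[iReg & 15] : s_apszLegacy[iReg & 7];
}
#endif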
13053
13054/**
13055 * Common 'mov regX,immX' helper.
13056 */
13057FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
13058{
13059 switch (pVCpu->iem.s.enmEffOpSize)
13060 {
13061 case IEMMODE_16BIT:
13062 {
13063 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13065
13066 IEM_MC_BEGIN(0, 1);
13067 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
13068 IEM_MC_STORE_GREG_U16(iReg, u16Value);
13069 IEM_MC_ADVANCE_RIP();
13070 IEM_MC_END();
13071 break;
13072 }
13073
13074 case IEMMODE_32BIT:
13075 {
13076 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13077 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13078
13079 IEM_MC_BEGIN(0, 1);
13080 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
13081 IEM_MC_STORE_GREG_U32(iReg, u32Value);
13082 IEM_MC_ADVANCE_RIP();
13083 IEM_MC_END();
13084 break;
13085 }
13086 case IEMMODE_64BIT:
13087 {
13088 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
13089 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13090
13091 IEM_MC_BEGIN(0, 1);
13092 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
13093 IEM_MC_STORE_GREG_U64(iReg, u64Value);
13094 IEM_MC_ADVANCE_RIP();
13095 IEM_MC_END();
13096 break;
13097 }
13098 }
13099
13100 return VINF_SUCCESS;
13101}
13102
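/*
 * The 64-bit case above is notable: B8+r with REX.W is the one instruction
 * taking a full 8-byte immediate (movabs).  A standalone sketch (not IEM
 * code) of how the B8..BF encoding is picked apart:
 */
#if 0 /* illustrative only */
# include <stdint.h>

static unsigned SketchDecodeMovRegImm(uint8_t bOpcode /* 0xb8..0xbf */, unsigned fRexB,
                                      unsigned cBitsOpSize, unsigned *pcbImm)
{
    *pcbImm = cBitsOpSize / 8;                 /* 2, 4 or 8 immediate bytes follow */
    return (bOpcode - 0xb8) | (fRexB ? 8 : 0); /* low opcode bits pick the register */
}
#endif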
13103
13104/** Opcode 0xb8. */
13105FNIEMOP_DEF(iemOp_eAX_Iv)
13106{
13107 IEMOP_MNEMONIC(mov_rAX_Iv, "mov rAX,Iv");
13108 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
13109}
13110
13111
13112/** Opcode 0xb9. */
13113FNIEMOP_DEF(iemOp_eCX_Iv)
13114{
13115 IEMOP_MNEMONIC(mov_rCX_Iv, "mov rCX,Iv");
13116 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
13117}
13118
13119
13120/** Opcode 0xba. */
13121FNIEMOP_DEF(iemOp_eDX_Iv)
13122{
13123 IEMOP_MNEMONIC(mov_rDX_Iv, "mov rDX,Iv");
13124 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
13125}
13126
13127
13128/** Opcode 0xbb. */
13129FNIEMOP_DEF(iemOp_eBX_Iv)
13130{
13131 IEMOP_MNEMONIC(mov_rBX_Iv, "mov rBX,Iv");
13132 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
13133}
13134
13135
13136/** Opcode 0xbc. */
13137FNIEMOP_DEF(iemOp_eSP_Iv)
13138{
13139 IEMOP_MNEMONIC(mov_rSP_Iv, "mov rSP,Iv");
13140 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
13141}
13142
13143
13144/** Opcode 0xbd. */
13145FNIEMOP_DEF(iemOp_eBP_Iv)
13146{
13147 IEMOP_MNEMONIC(mov_rBP_Iv, "mov rBP,Iv");
13148 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
13149}
13150
13151
13152/** Opcode 0xbe. */
13153FNIEMOP_DEF(iemOp_eSI_Iv)
13154{
13155 IEMOP_MNEMONIC(mov_rSI_Iv, "mov rSI,Iv");
13156 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
13157}
13158
13159
13160/** Opcode 0xbf. */
13161FNIEMOP_DEF(iemOp_eDI_Iv)
13162{
13163 IEMOP_MNEMONIC(mov_rDI_Iv, "mov rDI,Iv");
13164 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
13165}
13166
13167
13168/** Opcode 0xc0. */
13169FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
13170{
13171 IEMOP_HLP_MIN_186();
13172 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13173 PCIEMOPSHIFTSIZES pImpl;
13174 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13175 {
13176 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
13177 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
13178 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
13179 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
13180 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
13181 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
13182 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
13183 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13184 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc might otherwise complain */
13185 }
13186 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13187
13188 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13189 {
13190 /* register */
13191 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13193 IEM_MC_BEGIN(3, 0);
13194 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13195 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13196 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13197 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13198 IEM_MC_REF_EFLAGS(pEFlags);
13199 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13200 IEM_MC_ADVANCE_RIP();
13201 IEM_MC_END();
13202 }
13203 else
13204 {
13205 /* memory */
13206 IEM_MC_BEGIN(3, 2);
13207 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13208 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13209 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13210 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13211
13212 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13213 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13214 IEM_MC_ASSIGN(cShiftArg, cShift);
13215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13216 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13217 IEM_MC_FETCH_EFLAGS(EFlags);
13218 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13219
13220 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
13221 IEM_MC_COMMIT_EFLAGS(EFlags);
13222 IEM_MC_ADVANCE_RIP();
13223 IEM_MC_END();
13224 }
13225 return VINF_SUCCESS;
13226}
13227
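/*
 * Standalone sketch (not IEM code) of the Group 2 dispatch above: ModRM.reg
 * selects the shift/rotate operation, with /6 left undefined.
 */
#if 0 /* illustrative only */
# include <stdint.h>
# include <stddef.h>

static const char *SketchGrp2OpName(uint8_t bRm)
{
    static const char * const s_apszOps[8] =
    { "rol", "ror", "rcl", "rcr", "shl", "shr", NULL /* invalid */, "sar" };
    return s_apszOps[(bRm >> 3) & 7];
}
#endif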
13228
13229/** Opcode 0xc1. */
13230FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
13231{
13232 IEMOP_HLP_MIN_186();
13233 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13234 PCIEMOPSHIFTSIZES pImpl;
13235 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13236 {
13237 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
13238 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
13239 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
13240 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
13241 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
13242 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
13243 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
13244 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13245 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc might otherwise complain */
13246 }
13247 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13248
13249 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13250 {
13251 /* register */
13252 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13253 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13254 switch (pVCpu->iem.s.enmEffOpSize)
13255 {
13256 case IEMMODE_16BIT:
13257 IEM_MC_BEGIN(3, 0);
13258 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13259 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13260 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13261 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13262 IEM_MC_REF_EFLAGS(pEFlags);
13263 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13264 IEM_MC_ADVANCE_RIP();
13265 IEM_MC_END();
13266 return VINF_SUCCESS;
13267
13268 case IEMMODE_32BIT:
13269 IEM_MC_BEGIN(3, 0);
13270 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13271 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13272 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13273 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13274 IEM_MC_REF_EFLAGS(pEFlags);
13275 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13276 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
13277 IEM_MC_ADVANCE_RIP();
13278 IEM_MC_END();
13279 return VINF_SUCCESS;
13280
13281 case IEMMODE_64BIT:
13282 IEM_MC_BEGIN(3, 0);
13283 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13284 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13285 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13286 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13287 IEM_MC_REF_EFLAGS(pEFlags);
13288 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13289 IEM_MC_ADVANCE_RIP();
13290 IEM_MC_END();
13291 return VINF_SUCCESS;
13292
13293 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13294 }
13295 }
13296 else
13297 {
13298 /* memory */
13299 switch (pVCpu->iem.s.enmEffOpSize)
13300 {
13301 case IEMMODE_16BIT:
13302 IEM_MC_BEGIN(3, 2);
13303 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13304 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13305 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13306 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13307
13308 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13309 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13310 IEM_MC_ASSIGN(cShiftArg, cShift);
13311 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13312 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13313 IEM_MC_FETCH_EFLAGS(EFlags);
13314 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13315
13316 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
13317 IEM_MC_COMMIT_EFLAGS(EFlags);
13318 IEM_MC_ADVANCE_RIP();
13319 IEM_MC_END();
13320 return VINF_SUCCESS;
13321
13322 case IEMMODE_32BIT:
13323 IEM_MC_BEGIN(3, 2);
13324 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13325 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13326 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13327 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13328
13329 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13330 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13331 IEM_MC_ASSIGN(cShiftArg, cShift);
13332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13333 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13334 IEM_MC_FETCH_EFLAGS(EFlags);
13335 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13336
13337 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
13338 IEM_MC_COMMIT_EFLAGS(EFlags);
13339 IEM_MC_ADVANCE_RIP();
13340 IEM_MC_END();
13341 return VINF_SUCCESS;
13342
13343 case IEMMODE_64BIT:
13344 IEM_MC_BEGIN(3, 2);
13345 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13346 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13347 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13348 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13349
13350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13351 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13352 IEM_MC_ASSIGN(cShiftArg, cShift);
13353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13354 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13355 IEM_MC_FETCH_EFLAGS(EFlags);
13356 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13357
13358 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
13359 IEM_MC_COMMIT_EFLAGS(EFlags);
13360 IEM_MC_ADVANCE_RIP();
13361 IEM_MC_END();
13362 return VINF_SUCCESS;
13363
13364 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13365 }
13366 }
13367}
13368
13369
13370/** Opcode 0xc2. */
13371FNIEMOP_DEF(iemOp_retn_Iw)
13372{
13373 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
13374 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13375 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13376 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13377 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
13378}
13379
13380
13381/** Opcode 0xc3. */
13382FNIEMOP_DEF(iemOp_retn)
13383{
13384 IEMOP_MNEMONIC(retn, "retn");
13385 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13387 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
13388}
13389
13390
13391/** Opcode 0xc4. */
13392FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
13393{
13394 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13395 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
13396 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13397 {
13398 IEMOP_MNEMONIC(vex2_prefix, "2-byte-vex");
13399 /* The LES instruction is invalid in 64-bit mode. In legacy and
13400 compatibility mode it is invalid with MOD=3.
13401 The use as a VEX prefix is made possible by assigning the inverted
13402 REX.R to the top MOD bit, and the top bit in the inverted register
13403 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
13404 code to accessing registers 0..7 in this VEX form. */
13405 /** @todo VEX: Just use new tables for it. */
13406 return IEMOP_RAISE_INVALID_OPCODE();
13407 }
13408 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
13409 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
13410}
13411
13412
13413/** Opcode 0xc5. */
13414FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
13415{
13416 /* The LDS instruction is invalid in 64-bit mode. In legacy and
13417 compatibility mode it is invalid with MOD=3.
13418 The use as a VEX prefix is made possible by assigning the inverted
13419 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
13420 outside of 64-bit mode. VEX is not available in real or v86 mode. */
13421 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13422 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
13423 {
13424 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
13425 {
13426 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
13427 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
13428 }
13429 IEMOP_HLP_NO_REAL_OR_V86_MODE();
13430 }
13431
13432 IEMOP_MNEMONIC(vex3_prefix, "3-byte-vex");
13433/** @todo Test when exactly the VEX conformance checks kick in during
13434 * instruction decoding and fetching (using \#PF). */
13435 uint8_t bVex1; IEM_OPCODE_GET_NEXT_U8(&bVex1);
13436 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
13437 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
13438#if 0 /* will make sense of this next week... */
13439 if ( !(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
13440 &&
13441 )
13442 {
13443
13444 }
13445#endif
13446
13447 /** @todo VEX: Just use new tables for it. */
13448 return IEMOP_RAISE_INVALID_OPCODE();
13449}
13450
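/*
 * Standalone sketch (not IEM code) of the VEX payload layouts referred to
 * in the comments above.  Per the Intel SDM, 0xc5 introduces the two-byte
 * form (payload: nR vvvv L pp) and 0xc4 the three-byte form (payload byte
 * one: nR nX nB mmmmm, byte two: W vvvv L pp); the inverted R/X bits land
 * in the ModRM.mod position, and only mod=11b (invalid for LES/LDS) selects
 * the VEX interpretation.
 */
#if 0 /* illustrative only */
# include <stdint.h>

typedef struct SKETCHVEX
{
    unsigned fRexR, fRexX, fRexB, fRexW; /* stored un-inverted */
    unsigned iVvvv;                      /* extra source register (inverted on the wire) */
    unsigned fL;                         /* 0 = 128-bit, 1 = 256-bit */
    unsigned iPp;                        /* implied 66/F3/F2 prefix */
    unsigned iMmmmm;                     /* opcode map (three-byte form only) */
} SKETCHVEX;

static void SketchDecodeVex2(uint8_t b1, SKETCHVEX *p) /* byte following 0xc5 */
{
    p->fRexR  = !(b1 & 0x80);
    p->iVvvv  = (~b1 >> 3) & 15;
    p->fL     = (b1 >> 2) & 1;
    p->iPp    = b1 & 3;
    p->fRexX  = p->fRexB = p->fRexW = 0;
    p->iMmmmm = 1; /* the 0x0f map is implied */
}

static void SketchDecodeVex3(uint8_t b1, uint8_t b2, SKETCHVEX *p) /* bytes following 0xc4 */
{
    p->fRexR  = !(b1 & 0x80);
    p->fRexX  = !(b1 & 0x40);
    p->fRexB  = !(b1 & 0x20);
    p->iMmmmm = b1 & 31;
    p->fRexW  = (b2 >> 7) & 1;
    p->iVvvv  = (~b2 >> 3) & 15;
    p->fL     = (b2 >> 2) & 1;
    p->iPp    = b2 & 3;
}
#endif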
13451
13452/** Opcode 0xc6. */
13453FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
13454{
13455 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13456 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
13457 return IEMOP_RAISE_INVALID_OPCODE();
13458 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
13459
13460 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13461 {
13462 /* register access */
13463 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13465 IEM_MC_BEGIN(0, 0);
13466 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
13467 IEM_MC_ADVANCE_RIP();
13468 IEM_MC_END();
13469 }
13470 else
13471 {
13472 /* memory access. */
13473 IEM_MC_BEGIN(0, 1);
13474 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13475 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13476 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13478 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
13479 IEM_MC_ADVANCE_RIP();
13480 IEM_MC_END();
13481 }
13482 return VINF_SUCCESS;
13483}
13484
13485
13486/** Opcode 0xc7. */
13487FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
13488{
13489 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13490 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
13491 return IEMOP_RAISE_INVALID_OPCODE();
13492 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
13493
13494 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13495 {
13496 /* register access */
13497 switch (pVCpu->iem.s.enmEffOpSize)
13498 {
13499 case IEMMODE_16BIT:
13500 IEM_MC_BEGIN(0, 0);
13501 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13503 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
13504 IEM_MC_ADVANCE_RIP();
13505 IEM_MC_END();
13506 return VINF_SUCCESS;
13507
13508 case IEMMODE_32BIT:
13509 IEM_MC_BEGIN(0, 0);
13510 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13512 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
13513 IEM_MC_ADVANCE_RIP();
13514 IEM_MC_END();
13515 return VINF_SUCCESS;
13516
13517 case IEMMODE_64BIT:
13518 IEM_MC_BEGIN(0, 0);
13519 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
13520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13521 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
13522 IEM_MC_ADVANCE_RIP();
13523 IEM_MC_END();
13524 return VINF_SUCCESS;
13525
13526 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13527 }
13528 }
13529 else
13530 {
13531 /* memory access. */
13532 switch (pVCpu->iem.s.enmEffOpSize)
13533 {
13534 case IEMMODE_16BIT:
13535 IEM_MC_BEGIN(0, 1);
13536 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13537 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
13538 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13540 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
13541 IEM_MC_ADVANCE_RIP();
13542 IEM_MC_END();
13543 return VINF_SUCCESS;
13544
13545 case IEMMODE_32BIT:
13546 IEM_MC_BEGIN(0, 1);
13547 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13548 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
13549 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13551 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
13552 IEM_MC_ADVANCE_RIP();
13553 IEM_MC_END();
13554 return VINF_SUCCESS;
13555
13556 case IEMMODE_64BIT:
13557 IEM_MC_BEGIN(0, 1);
13558 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13559 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
13560 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
13561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13562 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
13563 IEM_MC_ADVANCE_RIP();
13564 IEM_MC_END();
13565 return VINF_SUCCESS;
13566
13567 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13568 }
13569 }
13570}
13571
13572
13573
13574
13575/** Opcode 0xc8. */
13576FNIEMOP_DEF(iemOp_enter_Iw_Ib)
13577{
13578 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
13579 IEMOP_HLP_MIN_186();
13580 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13581 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
13582 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
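 /* Note: the nesting level architecturally operates modulo 32; the
    (u8NestingLevel % 32) masking is assumed to be done by iemCImpl_enter
    rather than here in the decoder. */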
13583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13584 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
13585}
13586
13587
13588/** Opcode 0xc9. */
13589FNIEMOP_DEF(iemOp_leave)
13590{
13591 IEMOP_MNEMONIC(leave, "leave");
13592 IEMOP_HLP_MIN_186();
13593 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13595 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
13596}
13597
13598
13599/** Opcode 0xca. */
13600FNIEMOP_DEF(iemOp_retf_Iw)
13601{
13602 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
13603 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13605 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13606 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
13607}
13608
13609
13610/** Opcode 0xcb. */
13611FNIEMOP_DEF(iemOp_retf)
13612{
13613 IEMOP_MNEMONIC(retf, "retf");
13614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13615 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13616 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
13617}
13618
13619
13620/** Opcode 0xcc. */
13621FNIEMOP_DEF(iemOp_int_3)
13622{
13623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13624 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
13625}
13626
13627
13628/** Opcode 0xcd. */
13629FNIEMOP_DEF(iemOp_int_Ib)
13630{
13631 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
13632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13633 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
13634}
13635
13636
13637/** Opcode 0xce. */
13638FNIEMOP_DEF(iemOp_into)
13639{
13640 IEMOP_MNEMONIC(into, "into");
13641 IEMOP_HLP_NO_64BIT();
13642
13643 IEM_MC_BEGIN(2, 0);
13644 IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
13645 IEM_MC_ARG_CONST(bool, fIsBpInstr, /*=*/ false, 1);
13646 IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
13647 IEM_MC_END();
13648 return VINF_SUCCESS;
13649}
13650
13651
13652/** Opcode 0xcf. */
13653FNIEMOP_DEF(iemOp_iret)
13654{
13655 IEMOP_MNEMONIC(iret, "iret");
13656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13657 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
13658}
13659
13660
13661/** Opcode 0xd0. */
13662FNIEMOP_DEF(iemOp_Grp2_Eb_1)
13663{
13664 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13665 PCIEMOPSHIFTSIZES pImpl;
13666 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13667 {
13668 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
13669 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
13670 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
13671 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
13672 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
13673 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
13674 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
13675 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13676 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc cannot tell the switch is exhaustive. */
13677 }
13678 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13679
13680 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13681 {
13682 /* register */
13683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13684 IEM_MC_BEGIN(3, 0);
13685 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13686 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
13687 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13688 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13689 IEM_MC_REF_EFLAGS(pEFlags);
13690 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13691 IEM_MC_ADVANCE_RIP();
13692 IEM_MC_END();
13693 }
13694 else
13695 {
13696 /* memory */
13697 IEM_MC_BEGIN(3, 2);
13698 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13699 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
13700 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13701 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13702
13703 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13705 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13706 IEM_MC_FETCH_EFLAGS(EFlags);
13707 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13708
13709 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
13710 IEM_MC_COMMIT_EFLAGS(EFlags);
13711 IEM_MC_ADVANCE_RIP();
13712 IEM_MC_END();
13713 }
13714 return VINF_SUCCESS;
13715}
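/* Decode example (illustrative only): d0 e3 has ModRM e3 = mod 11, reg 100,
   rm 011, so the reg field selects g_iemAImpl_shl above and the instruction
   decodes as 'shl bl, 1'. */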
13716
13717
13718
13719/** Opcode 0xd1. */
13720FNIEMOP_DEF(iemOp_Grp2_Ev_1)
13721{
13722 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13723 PCIEMOPSHIFTSIZES pImpl;
13724 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13725 {
13726 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
13727 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
13728 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
13729 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
13730 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
13731 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
13732 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
13733 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13734 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc cannot tell the switch is exhaustive. */
13735 }
13736 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13737
13738 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13739 {
13740 /* register */
13741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13742 switch (pVCpu->iem.s.enmEffOpSize)
13743 {
13744 case IEMMODE_16BIT:
13745 IEM_MC_BEGIN(3, 0);
13746 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13747 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13748 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13749 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13750 IEM_MC_REF_EFLAGS(pEFlags);
13751 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13752 IEM_MC_ADVANCE_RIP();
13753 IEM_MC_END();
13754 return VINF_SUCCESS;
13755
13756 case IEMMODE_32BIT:
13757 IEM_MC_BEGIN(3, 0);
13758 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13759 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13760 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13761 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13762 IEM_MC_REF_EFLAGS(pEFlags);
13763 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13764 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
13765 IEM_MC_ADVANCE_RIP();
13766 IEM_MC_END();
13767 return VINF_SUCCESS;
13768
13769 case IEMMODE_64BIT:
13770 IEM_MC_BEGIN(3, 0);
13771 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13772 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13773 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13774 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13775 IEM_MC_REF_EFLAGS(pEFlags);
13776 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13777 IEM_MC_ADVANCE_RIP();
13778 IEM_MC_END();
13779 return VINF_SUCCESS;
13780
13781 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13782 }
13783 }
13784 else
13785 {
13786 /* memory */
13787 switch (pVCpu->iem.s.enmEffOpSize)
13788 {
13789 case IEMMODE_16BIT:
13790 IEM_MC_BEGIN(3, 2);
13791 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13792 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13793 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13795
13796 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13798 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13799 IEM_MC_FETCH_EFLAGS(EFlags);
13800 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13801
13802 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
13803 IEM_MC_COMMIT_EFLAGS(EFlags);
13804 IEM_MC_ADVANCE_RIP();
13805 IEM_MC_END();
13806 return VINF_SUCCESS;
13807
13808 case IEMMODE_32BIT:
13809 IEM_MC_BEGIN(3, 2);
13810 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13811 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13812 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13813 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13814
13815 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13817 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13818 IEM_MC_FETCH_EFLAGS(EFlags);
13819 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13820
13821 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
13822 IEM_MC_COMMIT_EFLAGS(EFlags);
13823 IEM_MC_ADVANCE_RIP();
13824 IEM_MC_END();
13825 return VINF_SUCCESS;
13826
13827 case IEMMODE_64BIT:
13828 IEM_MC_BEGIN(3, 2);
13829 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13830 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13831 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13832 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13833
13834 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13836 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13837 IEM_MC_FETCH_EFLAGS(EFlags);
13838 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13839
13840 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
13841 IEM_MC_COMMIT_EFLAGS(EFlags);
13842 IEM_MC_ADVANCE_RIP();
13843 IEM_MC_END();
13844 return VINF_SUCCESS;
13845
13846 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13847 }
13848 }
13849}
13850
13851
13852/** Opcode 0xd2. */
13853FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
13854{
13855 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13856 PCIEMOPSHIFTSIZES pImpl;
13857 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13858 {
13859 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
13860 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
13861 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
13862 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
13863 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
13864 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
13865 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
13866 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13867 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
13868 }
13869 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13870
13871 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13872 {
13873 /* register */
13874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13875 IEM_MC_BEGIN(3, 0);
13876 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13877 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13878 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13879 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13880 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13881 IEM_MC_REF_EFLAGS(pEFlags);
13882 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13883 IEM_MC_ADVANCE_RIP();
13884 IEM_MC_END();
13885 }
13886 else
13887 {
13888 /* memory */
13889 IEM_MC_BEGIN(3, 2);
13890 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13891 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13892 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13893 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13894
13895 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13897 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13898 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13899 IEM_MC_FETCH_EFLAGS(EFlags);
13900 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13901
13902 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
13903 IEM_MC_COMMIT_EFLAGS(EFlags);
13904 IEM_MC_ADVANCE_RIP();
13905 IEM_MC_END();
13906 }
13907 return VINF_SUCCESS;
13908}
13909
13910
13911/** Opcode 0xd3. */
13912FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
13913{
13914 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13915 PCIEMOPSHIFTSIZES pImpl;
13916 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13917 {
13918 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
13919 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
13920 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
13921 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
13922 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
13923 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
13924 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
13925 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13926 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc cannot tell the switch is exhaustive. */
13927 }
13928 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13929
13930 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13931 {
13932 /* register */
13933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13934 switch (pVCpu->iem.s.enmEffOpSize)
13935 {
13936 case IEMMODE_16BIT:
13937 IEM_MC_BEGIN(3, 0);
13938 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13939 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13940 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13941 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13942 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13943 IEM_MC_REF_EFLAGS(pEFlags);
13944 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13945 IEM_MC_ADVANCE_RIP();
13946 IEM_MC_END();
13947 return VINF_SUCCESS;
13948
13949 case IEMMODE_32BIT:
13950 IEM_MC_BEGIN(3, 0);
13951 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13952 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13953 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13954 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13955 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13956 IEM_MC_REF_EFLAGS(pEFlags);
13957 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13958 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
13959 IEM_MC_ADVANCE_RIP();
13960 IEM_MC_END();
13961 return VINF_SUCCESS;
13962
13963 case IEMMODE_64BIT:
13964 IEM_MC_BEGIN(3, 0);
13965 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13966 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13967 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13968 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13969 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13970 IEM_MC_REF_EFLAGS(pEFlags);
13971 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13972 IEM_MC_ADVANCE_RIP();
13973 IEM_MC_END();
13974 return VINF_SUCCESS;
13975
13976 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13977 }
13978 }
13979 else
13980 {
13981 /* memory */
13982 switch (pVCpu->iem.s.enmEffOpSize)
13983 {
13984 case IEMMODE_16BIT:
13985 IEM_MC_BEGIN(3, 2);
13986 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13987 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13988 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13989 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13990
13991 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13993 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13994 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13995 IEM_MC_FETCH_EFLAGS(EFlags);
13996 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13997
13998 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
13999 IEM_MC_COMMIT_EFLAGS(EFlags);
14000 IEM_MC_ADVANCE_RIP();
14001 IEM_MC_END();
14002 return VINF_SUCCESS;
14003
14004 case IEMMODE_32BIT:
14005 IEM_MC_BEGIN(3, 2);
14006 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
14007 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14008 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14009 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14010
14011 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14012 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14013 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14014 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14015 IEM_MC_FETCH_EFLAGS(EFlags);
14016 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
14017
14018 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
14019 IEM_MC_COMMIT_EFLAGS(EFlags);
14020 IEM_MC_ADVANCE_RIP();
14021 IEM_MC_END();
14022 return VINF_SUCCESS;
14023
14024 case IEMMODE_64BIT:
14025 IEM_MC_BEGIN(3, 2);
14026 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
14027 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14028 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14029 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14030
14031 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14033 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14034 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14035 IEM_MC_FETCH_EFLAGS(EFlags);
14036 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
14037
14038 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
14039 IEM_MC_COMMIT_EFLAGS(EFlags);
14040 IEM_MC_ADVANCE_RIP();
14041 IEM_MC_END();
14042 return VINF_SUCCESS;
14043
14044 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14045 }
14046 }
14047}
14048
14049/** Opcode 0xd4. */
14050FNIEMOP_DEF(iemOp_aam_Ib)
14051{
14052 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
14053 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
14054 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14055 IEMOP_HLP_NO_64BIT();
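 /* AAM performs AH = AL / bImm and AL = AL % bImm; like DIV, a zero
    divisor raises a divide error (#DE), hence the check below. */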
14056 if (!bImm)
14057 return IEMOP_RAISE_DIVIDE_ERROR();
14058 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
14059}
14060
14061
14062/** Opcode 0xd5. */
14063FNIEMOP_DEF(iemOp_aad_Ib)
14064{
14065 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
14066 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
14067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14068 IEMOP_HLP_NO_64BIT();
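 /* AAD performs AL = (AL + AH * bImm) & 0xff and AH = 0; unlike AAM, a
    zero immediate is harmless, so no divide-error check is needed. */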
14069 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
14070}
14071
14072
14073/** Opcode 0xd6. */
14074FNIEMOP_DEF(iemOp_salc)
14075{
14076 IEMOP_MNEMONIC(salc, "salc");
14077 IEMOP_HLP_MIN_286(); /* (undocumented at the time) */
14079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14080 IEMOP_HLP_NO_64BIT();
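 /* SALC sets AL from the carry flag, i.e. AL = CF ? 0xff : 0x00, and
    leaves EFLAGS untouched. */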
14081
14082 IEM_MC_BEGIN(0, 0);
14083 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
14084 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
14085 } IEM_MC_ELSE() {
14086 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
14087 } IEM_MC_ENDIF();
14088 IEM_MC_ADVANCE_RIP();
14089 IEM_MC_END();
14090 return VINF_SUCCESS;
14091}
14092
14093
14094/** Opcode 0xd7. */
14095FNIEMOP_DEF(iemOp_xlat)
14096{
14097 IEMOP_MNEMONIC(xlat, "xlat");
14098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
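 /* XLAT loads AL = mem8[seg:(E)BX/RBX + unsigned AL]; only the address
    width differs between the three cases below. */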
14099 switch (pVCpu->iem.s.enmEffAddrMode)
14100 {
14101 case IEMMODE_16BIT:
14102 IEM_MC_BEGIN(2, 0);
14103 IEM_MC_LOCAL(uint8_t, u8Tmp);
14104 IEM_MC_LOCAL(uint16_t, u16Addr);
14105 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
14106 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
14107 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
14108 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
14109 IEM_MC_ADVANCE_RIP();
14110 IEM_MC_END();
14111 return VINF_SUCCESS;
14112
14113 case IEMMODE_32BIT:
14114 IEM_MC_BEGIN(2, 0);
14115 IEM_MC_LOCAL(uint8_t, u8Tmp);
14116 IEM_MC_LOCAL(uint32_t, u32Addr);
14117 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
14118 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
14119 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
14120 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
14121 IEM_MC_ADVANCE_RIP();
14122 IEM_MC_END();
14123 return VINF_SUCCESS;
14124
14125 case IEMMODE_64BIT:
14126 IEM_MC_BEGIN(2, 0);
14127 IEM_MC_LOCAL(uint8_t, u8Tmp);
14128 IEM_MC_LOCAL(uint64_t, u64Addr);
14129 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
14130 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
14131 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
14132 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
14133 IEM_MC_ADVANCE_RIP();
14134 IEM_MC_END();
14135 return VINF_SUCCESS;
14136
14137 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14138 }
14139}
14140
14141
14142/**
14143 * Common worker for FPU instructions working on ST0 and STn, and storing the
14144 * result in ST0.
14145 *
14146 * @param bRm The ModR/M byte; the R/M field selects STn.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14147 */
14148FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
14149{
14150 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14151
14152 IEM_MC_BEGIN(3, 1);
14153 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14154 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14155 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14156 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14157
14158 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14159 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14160 IEM_MC_PREPARE_FPU_USAGE();
14161 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14162 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
14163 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14164 IEM_MC_ELSE()
14165 IEM_MC_FPU_STACK_UNDERFLOW(0);
14166 IEM_MC_ENDIF();
14167 IEM_MC_ADVANCE_RIP();
14168
14169 IEM_MC_END();
14170 return VINF_SUCCESS;
14171}
14172
14173
14174/**
14175 * Common worker for FPU instructions working on ST0 and STn, and only affecting
14176 * flags.
14177 *
14178 * @param bRm The ModR/M byte; the R/M field selects STn.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14179 */
14180FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14181{
14182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14183
14184 IEM_MC_BEGIN(3, 1);
14185 IEM_MC_LOCAL(uint16_t, u16Fsw);
14186 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14187 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14188 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14189
14190 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14191 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14192 IEM_MC_PREPARE_FPU_USAGE();
14193 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14194 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14195 IEM_MC_UPDATE_FSW(u16Fsw);
14196 IEM_MC_ELSE()
14197 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
14198 IEM_MC_ENDIF();
14199 IEM_MC_ADVANCE_RIP();
14200
14201 IEM_MC_END();
14202 return VINF_SUCCESS;
14203}
14204
14205
14206/**
14207 * Common worker for FPU instructions working on ST0 and STn, only affecting
14208 * flags, and popping when done.
14209 *
14210 * @param bRm The ModR/M byte; the R/M field selects STn.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14211 */
14212FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14213{
14214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14215
14216 IEM_MC_BEGIN(3, 1);
14217 IEM_MC_LOCAL(uint16_t, u16Fsw);
14218 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14219 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14220 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14221
14222 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14223 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14224 IEM_MC_PREPARE_FPU_USAGE();
14225 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14226 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14227 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
14228 IEM_MC_ELSE()
14229 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
14230 IEM_MC_ENDIF();
14231 IEM_MC_ADVANCE_RIP();
14232
14233 IEM_MC_END();
14234 return VINF_SUCCESS;
14235}
14236
14237
14238/** Opcode 0xd8 11/0. */
14239FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
14240{
14241 IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
14242 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
14243}
14244
14245
14246/** Opcode 0xd8 11/1. */
14247FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
14248{
14249 IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
14250 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
14251}
14252
14253
14254/** Opcode 0xd8 11/2. */
14255FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
14256{
14257 IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
14258 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
14259}
14260
14261
14262/** Opcode 0xd8 11/3. */
14263FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
14264{
14265 IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
14266 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
14267}
14268
14269
14270/** Opcode 0xd8 11/4. */
14271FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
14272{
14273 IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
14274 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
14275}
14276
14277
14278/** Opcode 0xd8 11/5. */
14279FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
14280{
14281 IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
14282 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
14283}
14284
14285
14286/** Opcode 0xd8 11/6. */
14287FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
14288{
14289 IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
14290 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
14291}
14292
14293
14294/** Opcode 0xd8 11/7. */
14295FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
14296{
14297 IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
14298 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
14299}
14300
14301
14302/**
14303 * Common worker for FPU instructions working on ST0 and an m32r, and storing
14304 * the result in ST0.
14305 *
14306 * @param bRm The ModR/M byte; encodes the m32r memory operand.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14307 */
14308FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
14309{
14310 IEM_MC_BEGIN(3, 3);
14311 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14312 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14313 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
14314 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14315 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14316 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
14317
14318 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14320
14321 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14322 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14323 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14324
14325 IEM_MC_PREPARE_FPU_USAGE();
14326 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14327 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
14328 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14329 IEM_MC_ELSE()
14330 IEM_MC_FPU_STACK_UNDERFLOW(0);
14331 IEM_MC_ENDIF();
14332 IEM_MC_ADVANCE_RIP();
14333
14334 IEM_MC_END();
14335 return VINF_SUCCESS;
14336}
14337
14338
14339/** Opcode 0xd8 !11/0. */
14340FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
14341{
14342 IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
14343 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
14344}
14345
14346
14347/** Opcode 0xd8 !11/1. */
14348FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
14349{
14350 IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
14351 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
14352}
14353
14354
14355/** Opcode 0xd8 !11/2. */
14356FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
14357{
14358 IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");
14359
14360 IEM_MC_BEGIN(3, 3);
14361 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14362 IEM_MC_LOCAL(uint16_t, u16Fsw);
14363 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
14364 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14365 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14366 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
14367
14368 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14370
14371 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14372 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14373 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14374
14375 IEM_MC_PREPARE_FPU_USAGE();
14376 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14377 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
14378 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14379 IEM_MC_ELSE()
14380 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14381 IEM_MC_ENDIF();
14382 IEM_MC_ADVANCE_RIP();
14383
14384 IEM_MC_END();
14385 return VINF_SUCCESS;
14386}
14387
14388
14389/** Opcode 0xd8 !11/3. */
14390FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
14391{
14392 IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");
14393
14394 IEM_MC_BEGIN(3, 3);
14395 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14396 IEM_MC_LOCAL(uint16_t, u16Fsw);
14397 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
14398 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14399 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14400 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
14401
14402 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14403 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14404
14405 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14406 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14407 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14408
14409 IEM_MC_PREPARE_FPU_USAGE();
14410 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14411 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
14412 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14413 IEM_MC_ELSE()
14414 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14415 IEM_MC_ENDIF();
14416 IEM_MC_ADVANCE_RIP();
14417
14418 IEM_MC_END();
14419 return VINF_SUCCESS;
14420}
14421
14422
14423/** Opcode 0xd8 !11/4. */
14424FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
14425{
14426 IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
14427 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
14428}
14429
14430
14431/** Opcode 0xd8 !11/5. */
14432FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
14433{
14434 IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
14435 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
14436}
14437
14438
14439/** Opcode 0xd8 !11/6. */
14440FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
14441{
14442 IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
14443 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
14444}
14445
14446
14447/** Opcode 0xd8 !11/7. */
14448FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
14449{
14450 IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
14451 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
14452}
14453
14454
14455/** Opcode 0xd8. */
14456FNIEMOP_DEF(iemOp_EscF0)
14457{
14458 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14459 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
14460
14461 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14462 {
14463 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14464 {
14465 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
14466 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
14467 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
14468 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
14469 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
14470 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
14471 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
14472 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
14473 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14474 }
14475 }
14476 else
14477 {
14478 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14479 {
14480 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
14481 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
14482 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
14483 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
14484 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
14485 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
14486 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
14487 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
14488 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14489 }
14490 }
14491}
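/* Decode example (illustrative only): d8 c1 has ModRM c1 = mod 11, reg 000,
   rm 001, dispatching to iemOp_fadd_stN above, i.e. 'fadd st0, st1'. */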
14492
14493
14494/** Opcode 0xd9 /0 mem32real
14495 * @sa iemOp_fld_m64r */
14496FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
14497{
14498 IEMOP_MNEMONIC(fld_m32r, "fld m32r");
14499
14500 IEM_MC_BEGIN(2, 3);
14501 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14502 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14503 IEM_MC_LOCAL(RTFLOAT32U, r32Val);
14504 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14505 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);
14506
14507 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14509
14510 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14511 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14512 IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14513
14514 IEM_MC_PREPARE_FPU_USAGE();
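 /* A push wraps TOP downwards, so relative register 7 is the slot the
    pushed value will land in; if it is not empty this is a stack overflow
    rather than a load. */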
14515 IEM_MC_IF_FPUREG_IS_EMPTY(7)
14516 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
14517 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14518 IEM_MC_ELSE()
14519 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14520 IEM_MC_ENDIF();
14521 IEM_MC_ADVANCE_RIP();
14522
14523 IEM_MC_END();
14524 return VINF_SUCCESS;
14525}
14526
14527
14528/** Opcode 0xd9 !11/2 mem32real */
14529FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
14530{
14531 IEMOP_MNEMONIC(fst_m32r, "fst m32r");
14532 IEM_MC_BEGIN(3, 2);
14533 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14534 IEM_MC_LOCAL(uint16_t, u16Fsw);
14535 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14536 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
14537 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14538
14539 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14541 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14542 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14543
14544 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
14545 IEM_MC_PREPARE_FPU_USAGE();
14546 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14547 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
14548 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14549 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14550 IEM_MC_ELSE()
14551 IEM_MC_IF_FCW_IM()
14552 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
14553 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
14554 IEM_MC_ENDIF();
14555 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14556 IEM_MC_ENDIF();
14557 IEM_MC_ADVANCE_RIP();
14558
14559 IEM_MC_END();
14560 return VINF_SUCCESS;
14561}
14562
14563
14564/** Opcode 0xd9 !11/3 */
14565FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
14566{
14567 IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
14568 IEM_MC_BEGIN(3, 2);
14569 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14570 IEM_MC_LOCAL(uint16_t, u16Fsw);
14571 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14572 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
14573 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14574
14575 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14577 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14578 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14579
14580 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
14581 IEM_MC_PREPARE_FPU_USAGE();
14582 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14583 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
14584 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14585 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14586 IEM_MC_ELSE()
14587 IEM_MC_IF_FCW_IM()
14588 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
14589 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
14590 IEM_MC_ENDIF();
14591 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14592 IEM_MC_ENDIF();
14593 IEM_MC_ADVANCE_RIP();
14594
14595 IEM_MC_END();
14596 return VINF_SUCCESS;
14597}
14598
14599
14600/** Opcode 0xd9 !11/4 */
14601FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
14602{
14603 IEMOP_MNEMONIC(fldenv, "fldenv m14/m28byte");
14604 IEM_MC_BEGIN(3, 0);
14605 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
14606 IEM_MC_ARG(uint8_t, iEffSeg, 1);
14607 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
14608 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14610 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14611 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14612 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
14613 IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
14614 IEM_MC_END();
14615 return VINF_SUCCESS;
14616}
14617
14618
14619/** Opcode 0xd9 !11/5 */
14620FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
14621{
14622 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
14623 IEM_MC_BEGIN(1, 1);
14624 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14625 IEM_MC_ARG(uint16_t, u16Fcw, 0);
14626 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14628 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14629 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14630 IEM_MC_FETCH_MEM_U16(u16Fcw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14631 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fcw);
14632 IEM_MC_END();
14633 return VINF_SUCCESS;
14634}
14635
14636
14637/** Opcode 0xd9 !11/6 */
14638FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
14639{
14640 IEMOP_MNEMONIC(fnstenv, "fnstenv m14/m28byte");
14641 IEM_MC_BEGIN(3, 0);
14642 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
14643 IEM_MC_ARG(uint8_t, iEffSeg, 1);
14644 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
14645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14647 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14648 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
14649 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
14650 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
14651 IEM_MC_END();
14652 return VINF_SUCCESS;
14653}
14654
14655
14656/** Opcode 0xd9 !11/7 */
14657FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
14658{
14659 IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
14660 IEM_MC_BEGIN(2, 0);
14661 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14662 IEM_MC_LOCAL(uint16_t, u16Fcw);
14663 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14664 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14665 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14666 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
14667 IEM_MC_FETCH_FCW(u16Fcw);
14668 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
14669 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
14670 IEM_MC_END();
14671 return VINF_SUCCESS;
14672}
14673
14674
14675/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?. */
14676FNIEMOP_DEF(iemOp_fnop)
14677{
14678 IEMOP_MNEMONIC(fnop, "fnop");
14679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14680
14681 IEM_MC_BEGIN(0, 0);
14682 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14683 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14684 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14685 /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
14686 * an Intel optimization. Investigate. */
14687 IEM_MC_UPDATE_FPU_OPCODE_IP();
14688 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
14689 IEM_MC_END();
14690 return VINF_SUCCESS;
14691}
14692
14693
14694/** Opcode 0xd9 11/0 stN */
14695FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
14696{
14697 IEMOP_MNEMONIC(fld_stN, "fld stN");
14698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14699
14700 /** @todo Testcase: Check whether this raises \#MF. Intel's documentation does
14701 * not mention it; AMD's indicates that it does. */
14702 IEM_MC_BEGIN(0, 2);
14703 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
14704 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14705 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14706 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14707
14708 IEM_MC_PREPARE_FPU_USAGE();
14709 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
14710 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
14711 IEM_MC_PUSH_FPU_RESULT(FpuRes);
14712 IEM_MC_ELSE()
14713 IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
14714 IEM_MC_ENDIF();
14715
14716 IEM_MC_ADVANCE_RIP();
14717 IEM_MC_END();
14718
14719 return VINF_SUCCESS;
14720}
14721
14722
14723/** Opcode 0xd9 11/3 stN */
14724FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
14725{
14726 IEMOP_MNEMONIC(fxch_stN, "fxch stN");
14727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14728
14729 /** @todo Testcase: Check whether this raises \#MF. Intel's documentation does
14730 * not mention it; AMD's indicates that it does. */
14731 IEM_MC_BEGIN(1, 3);
14732 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
14733 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
14734 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14735 IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
14736 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14737 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14738
14739 IEM_MC_PREPARE_FPU_USAGE();
14740 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14741 IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
14742 IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
14743 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14744 IEM_MC_ELSE()
14745 IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
14746 IEM_MC_ENDIF();
14747
14748 IEM_MC_ADVANCE_RIP();
14749 IEM_MC_END();
14750
14751 return VINF_SUCCESS;
14752}
14753
14754
14755/** Opcode 0xd9 11/4, 0xdd 11/2. */
14756FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
14757{
14758 IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
14759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14760
14761 /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
14762 uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
14763 if (!iDstReg)
14764 {
14765 IEM_MC_BEGIN(0, 1);
14766 IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
14767 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14768 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14769
14770 IEM_MC_PREPARE_FPU_USAGE();
14771 IEM_MC_IF_FPUREG_NOT_EMPTY(0)
14772 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
14773 IEM_MC_ELSE()
14774 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
14775 IEM_MC_ENDIF();
14776
14777 IEM_MC_ADVANCE_RIP();
14778 IEM_MC_END();
14779 }
14780 else
14781 {
14782 IEM_MC_BEGIN(0, 2);
14783 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
14784 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14785 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14786 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14787
14788 IEM_MC_PREPARE_FPU_USAGE();
14789 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14790 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
14791 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
14792 IEM_MC_ELSE()
14793 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
14794 IEM_MC_ENDIF();
14795
14796 IEM_MC_ADVANCE_RIP();
14797 IEM_MC_END();
14798 }
14799 return VINF_SUCCESS;
14800}
14801
14802
14803/**
14804 * Common worker for FPU instructions working on ST0 and replaces it with the
14805 * result, i.e. unary operators.
14806 *
14807 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14808 */
14809FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
14810{
14811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14812
14813 IEM_MC_BEGIN(2, 1);
14814 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14815 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14816 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14817
14818 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14819 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14820 IEM_MC_PREPARE_FPU_USAGE();
14821 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14822 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
14823 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14824 IEM_MC_ELSE()
14825 IEM_MC_FPU_STACK_UNDERFLOW(0);
14826 IEM_MC_ENDIF();
14827 IEM_MC_ADVANCE_RIP();
14828
14829 IEM_MC_END();
14830 return VINF_SUCCESS;
14831}
14832
14833
14834/** Opcode 0xd9 0xe0. */
14835FNIEMOP_DEF(iemOp_fchs)
14836{
14837 IEMOP_MNEMONIC(fchs_st0, "fchs st0");
14838 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
14839}
14840
14841
14842/** Opcode 0xd9 0xe1. */
14843FNIEMOP_DEF(iemOp_fabs)
14844{
14845 IEMOP_MNEMONIC(fabs_st0, "fabs st0");
14846 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
14847}
14848
14849
14850/**
14851 * Common worker for FPU instructions working on ST0 and only returns FSW.
14852 *
14853 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14854 */
14855FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
14856{
14857 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14858
14859 IEM_MC_BEGIN(2, 1);
14860 IEM_MC_LOCAL(uint16_t, u16Fsw);
14861 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14862 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14863
14864 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14865 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14866 IEM_MC_PREPARE_FPU_USAGE();
14867 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14868 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
14869 IEM_MC_UPDATE_FSW(u16Fsw);
14870 IEM_MC_ELSE()
14871 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
14872 IEM_MC_ENDIF();
14873 IEM_MC_ADVANCE_RIP();
14874
14875 IEM_MC_END();
14876 return VINF_SUCCESS;
14877}
14878
14879
14880/** Opcode 0xd9 0xe4. */
14881FNIEMOP_DEF(iemOp_ftst)
14882{
14883 IEMOP_MNEMONIC(ftst_st0, "ftst st0");
14884 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
14885}
14886
14887
14888/** Opcode 0xd9 0xe5. */
14889FNIEMOP_DEF(iemOp_fxam)
14890{
14891 IEMOP_MNEMONIC(fxam_st0, "fxam st0");
14892 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
14893}
14894
14895
14896/**
14897 * Common worker for FPU instructions pushing a constant onto the FPU stack.
14898 *
14899 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14900 */
14901FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
14902{
14903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14904
14905 IEM_MC_BEGIN(1, 1);
14906 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14907 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14908
14909 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14910 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14911 IEM_MC_PREPARE_FPU_USAGE();
14912 IEM_MC_IF_FPUREG_IS_EMPTY(7)
14913 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
14914 IEM_MC_PUSH_FPU_RESULT(FpuRes);
14915 IEM_MC_ELSE()
14916 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
14917 IEM_MC_ENDIF();
14918 IEM_MC_ADVANCE_RIP();
14919
14920 IEM_MC_END();
14921 return VINF_SUCCESS;
14922}
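/* The constants pushed via the worker above, for the opcodes that follow:
   fld1 = +1.0, fldl2t = log2(10), fldl2e = log2(e), fldpi = pi,
   fldlg2 = log10(2), fldln2 = ln(2), fldz = +0.0. */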
14923
14924
14925/** Opcode 0xd9 0xe8. */
14926FNIEMOP_DEF(iemOp_fld1)
14927{
14928 IEMOP_MNEMONIC(fld1, "fld1");
14929 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
14930}
14931
14932
14933/** Opcode 0xd9 0xe9. */
14934FNIEMOP_DEF(iemOp_fldl2t)
14935{
14936 IEMOP_MNEMONIC(fldl2t, "fldl2t");
14937 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
14938}
14939
14940
14941/** Opcode 0xd9 0xea. */
14942FNIEMOP_DEF(iemOp_fldl2e)
14943{
14944 IEMOP_MNEMONIC(fldl2e, "fldl2e");
14945 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
14946}
14947
14948/** Opcode 0xd9 0xeb. */
14949FNIEMOP_DEF(iemOp_fldpi)
14950{
14951 IEMOP_MNEMONIC(fldpi, "fldpi");
14952 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
14953}
14954
14955
14956/** Opcode 0xd9 0xec. */
14957FNIEMOP_DEF(iemOp_fldlg2)
14958{
14959 IEMOP_MNEMONIC(fldlg2, "fldlg2");
14960 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
14961}
14962
14963/** Opcode 0xd9 0xed. */
14964FNIEMOP_DEF(iemOp_fldln2)
14965{
14966 IEMOP_MNEMONIC(fldln2, "fldln2");
14967 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
14968}
14969
14970
14971/** Opcode 0xd9 0xee. */
14972FNIEMOP_DEF(iemOp_fldz)
14973{
14974 IEMOP_MNEMONIC(fldz, "fldz");
14975 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
14976}
14977
14978
14979/** Opcode 0xd9 0xf0. */
14980FNIEMOP_DEF(iemOp_f2xm1)
14981{
14982 IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
14983 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
14984}
14985
14986
14987/**
14988 * Common worker for FPU instructions working on STn and ST0, storing the result
14989 * in STn, and popping the stack unless IE, DE or ZE was raised.
14990 *
14991 * @param bRm The ModR/M byte; the R/M field selects STn, the destination.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14992 */
14993FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
14994{
14995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14996
14997 IEM_MC_BEGIN(3, 1);
14998 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14999 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15000 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15001 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
15002
15003 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15004 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15005
15006 IEM_MC_PREPARE_FPU_USAGE();
15007 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
15008 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
15009 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
15010 IEM_MC_ELSE()
15011 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
15012 IEM_MC_ENDIF();
15013 IEM_MC_ADVANCE_RIP();
15014
15015 IEM_MC_END();
15016 return VINF_SUCCESS;
15017}
15018
15019
15020/** Opcode 0xd9 0xf1. */
15021FNIEMOP_DEF(iemOp_fyl2x)
15022{
15023 IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
15024 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
15025}
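/* fyl2x computes st1 = st1 * log2(st0) and then pops, which is why it maps
   onto the stN_st0_pop worker with an stN index of 1. */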
15026
15027
15028/**
15029 * Common worker for FPU instructions working on ST0 and having two outputs, one
15030 * replacing ST0 and one pushed onto the stack.
15031 *
15032 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15033 */
15034FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
15035{
15036 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15037
15038 IEM_MC_BEGIN(2, 1);
15039 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
15040 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
15041 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
15042
15043 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15044 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15045 IEM_MC_PREPARE_FPU_USAGE();
15046 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15047 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
15048 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
15049 IEM_MC_ELSE()
15050 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
15051 IEM_MC_ENDIF();
15052 IEM_MC_ADVANCE_RIP();
15053
15054 IEM_MC_END();
15055 return VINF_SUCCESS;
15056}
15057
15058
15059/** Opcode 0xd9 0xf2. */
15060FNIEMOP_DEF(iemOp_fptan)
15061{
15062 IEMOP_MNEMONIC(fptan_st0, "fptan st0");
15063 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
15064}
15065
15066
15067/** Opcode 0xd9 0xf3. */
15068FNIEMOP_DEF(iemOp_fpatan)
15069{
15070 IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
15071 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
15072}
15073
15074
15075/** Opcode 0xd9 0xf4. */
15076FNIEMOP_DEF(iemOp_fxtract)
15077{
15078 IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
15079 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
15080}
15081
15082
15083/** Opcode 0xd9 0xf5. */
15084FNIEMOP_DEF(iemOp_fprem1)
15085{
15086 IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
15087 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
15088}
15089
15090
15091/** Opcode 0xd9 0xf6. */
15092FNIEMOP_DEF(iemOp_fdecstp)
15093{
15094 IEMOP_MNEMONIC(fdecstp, "fdecstp");
15095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15096 /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
15097 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
15098 * FINCSTP and FDECSTP. */
15099
15100 IEM_MC_BEGIN(0,0);
15101
15102 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15103 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15104
15105 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15106 IEM_MC_FPU_STACK_DEC_TOP();
15107 IEM_MC_UPDATE_FSW_CONST(0);
15108
15109 IEM_MC_ADVANCE_RIP();
15110 IEM_MC_END();
15111 return VINF_SUCCESS;
15112}
15113
15114
15115/** Opcode 0xd9 0xf7. */
15116FNIEMOP_DEF(iemOp_fincstp)
15117{
15118 IEMOP_MNEMONIC(fincstp, "fincstp");
15119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15120 /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
15121 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
15122 * FINCSTP and FDECSTP. */
15123
15124 IEM_MC_BEGIN(0,0);
15125
15126 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15127 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15128
15129 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15130 IEM_MC_FPU_STACK_INC_TOP();
15131 IEM_MC_UPDATE_FSW_CONST(0);
15132
15133 IEM_MC_ADVANCE_RIP();
15134 IEM_MC_END();
15135 return VINF_SUCCESS;
15136}
15137
15138
15139/** Opcode 0xd9 0xf8. */
15140FNIEMOP_DEF(iemOp_fprem)
15141{
15142 IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
15143 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
15144}
15145
15146
15147/** Opcode 0xd9 0xf9. */
15148FNIEMOP_DEF(iemOp_fyl2xp1)
15149{
15150 IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
15151 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
15152}
15153
15154
15155/** Opcode 0xd9 0xfa. */
15156FNIEMOP_DEF(iemOp_fsqrt)
15157{
15158 IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
15159 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
15160}
15161
15162
15163/** Opcode 0xd9 0xfb. */
15164FNIEMOP_DEF(iemOp_fsincos)
15165{
15166 IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
15167 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
15168}
15169
15170
15171/** Opcode 0xd9 0xfc. */
15172FNIEMOP_DEF(iemOp_frndint)
15173{
15174 IEMOP_MNEMONIC(frndint_st0, "frndint st0");
15175 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
15176}
15177
15178
15179/** Opcode 0xd9 0xfd. */
15180FNIEMOP_DEF(iemOp_fscale)
15181{
15182 IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
15183 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
15184}
15185
15186
15187/** Opcode 0xd9 0xfe. */
15188FNIEMOP_DEF(iemOp_fsin)
15189{
15190 IEMOP_MNEMONIC(fsin_st0, "fsin st0");
15191 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
15192}
15193
15194
15195/** Opcode 0xd9 0xff. */
15196FNIEMOP_DEF(iemOp_fcos)
15197{
15198 IEMOP_MNEMONIC(fcos_st0, "fcos st0");
15199 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
15200}
15201
15202
15203/** Used by iemOp_EscF1. */
15204IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
15205{
15206 /* 0xe0 */ iemOp_fchs,
15207 /* 0xe1 */ iemOp_fabs,
15208 /* 0xe2 */ iemOp_Invalid,
15209 /* 0xe3 */ iemOp_Invalid,
15210 /* 0xe4 */ iemOp_ftst,
15211 /* 0xe5 */ iemOp_fxam,
15212 /* 0xe6 */ iemOp_Invalid,
15213 /* 0xe7 */ iemOp_Invalid,
15214 /* 0xe8 */ iemOp_fld1,
15215 /* 0xe9 */ iemOp_fldl2t,
15216 /* 0xea */ iemOp_fldl2e,
15217 /* 0xeb */ iemOp_fldpi,
15218 /* 0xec */ iemOp_fldlg2,
15219 /* 0xed */ iemOp_fldln2,
15220 /* 0xee */ iemOp_fldz,
15221 /* 0xef */ iemOp_Invalid,
15222 /* 0xf0 */ iemOp_f2xm1,
15223 /* 0xf1 */ iemOp_fyl2x,
15224 /* 0xf2 */ iemOp_fptan,
15225 /* 0xf3 */ iemOp_fpatan,
15226 /* 0xf4 */ iemOp_fxtract,
15227 /* 0xf5 */ iemOp_fprem1,
15228 /* 0xf6 */ iemOp_fdecstp,
15229 /* 0xf7 */ iemOp_fincstp,
15230 /* 0xf8 */ iemOp_fprem,
15231 /* 0xf9 */ iemOp_fyl2xp1,
15232 /* 0xfa */ iemOp_fsqrt,
15233 /* 0xfb */ iemOp_fsincos,
15234 /* 0xfc */ iemOp_frndint,
15235 /* 0xfd */ iemOp_fscale,
15236 /* 0xfe */ iemOp_fsin,
15237 /* 0xff */ iemOp_fcos
15238};
15239
15240
15241/** Opcode 0xd9. */
15242FNIEMOP_DEF(iemOp_EscF1)
15243{
15244 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15245 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
15246
15247 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15248 {
15249 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15250 {
15251 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
15252 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
15253 case 2:
15254 if (bRm == 0xd0)
15255 return FNIEMOP_CALL(iemOp_fnop);
15256 return IEMOP_RAISE_INVALID_OPCODE();
15257 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
15258 case 4:
15259 case 5:
15260 case 6:
15261 case 7:
15262 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
15263 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
15264 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15265 }
15266 }
15267 else
15268 {
15269 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15270 {
15271 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
15272 case 1: return IEMOP_RAISE_INVALID_OPCODE();
15273 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
15274 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
15275 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
15276 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
15277 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
15278 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
15279 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15280 }
15281 }
15282}
15283
15284
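/*
 * Decoding note for iemOp_EscF1 above (illustrative): the ModR/M byte
 * splits as mod:reg:rm = bits 7:6, 5:3 and 2:0.  Register-form 0xd9
 * escapes with reg >= 4 therefore occupy the byte range 0xe0..0xff, which
 * is why (bRm - 0xe0) indexes g_apfnEscF1_E0toFF directly.  A minimal
 * sketch of the field extraction:
 *
 * @code
 * uint8_t const bMod = (bRm >> X86_MODRM_MOD_SHIFT) & 3;                  // 3 = register operand
 * uint8_t const bReg = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK;
 * uint8_t const bRm3 = bRm & X86_MODRM_RM_MASK;                           // ST(i) index here
 * @endcode
 */

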
15285/** Opcode 0xda 11/0. */
15286FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
15287{
15288 IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
15289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15290
15291 IEM_MC_BEGIN(0, 1);
15292 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15293
15294 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15295 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15296
15297 IEM_MC_PREPARE_FPU_USAGE();
15298 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15299 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
15300 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15301 IEM_MC_ENDIF();
15302 IEM_MC_UPDATE_FPU_OPCODE_IP();
15303 IEM_MC_ELSE()
15304 IEM_MC_FPU_STACK_UNDERFLOW(0);
15305 IEM_MC_ENDIF();
15306 IEM_MC_ADVANCE_RIP();
15307
15308 IEM_MC_END();
15309 return VINF_SUCCESS;
15310}
15311
15312
15313/** Opcode 0xda 11/1. */
15314FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
15315{
15316 IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
15317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15318
15319 IEM_MC_BEGIN(0, 1);
15320 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15321
15322 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15323 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15324
15325 IEM_MC_PREPARE_FPU_USAGE();
15326 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15327 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
15328 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15329 IEM_MC_ENDIF();
15330 IEM_MC_UPDATE_FPU_OPCODE_IP();
15331 IEM_MC_ELSE()
15332 IEM_MC_FPU_STACK_UNDERFLOW(0);
15333 IEM_MC_ENDIF();
15334 IEM_MC_ADVANCE_RIP();
15335
15336 IEM_MC_END();
15337 return VINF_SUCCESS;
15338}
15339
15340
15341/** Opcode 0xda 11/2. */
15342FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
15343{
15344 IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
15345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15346
15347 IEM_MC_BEGIN(0, 1);
15348 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15349
15350 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15351 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15352
15353 IEM_MC_PREPARE_FPU_USAGE();
15354 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15355 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
15356 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15357 IEM_MC_ENDIF();
15358 IEM_MC_UPDATE_FPU_OPCODE_IP();
15359 IEM_MC_ELSE()
15360 IEM_MC_FPU_STACK_UNDERFLOW(0);
15361 IEM_MC_ENDIF();
15362 IEM_MC_ADVANCE_RIP();
15363
15364 IEM_MC_END();
15365 return VINF_SUCCESS;
15366}
15367
15368
15369/** Opcode 0xda 11/3. */
15370FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
15371{
15372 IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
15373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15374
15375 IEM_MC_BEGIN(0, 1);
15376 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15377
15378 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15379 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15380
15381 IEM_MC_PREPARE_FPU_USAGE();
15382 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15383 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
15384 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15385 IEM_MC_ENDIF();
15386 IEM_MC_UPDATE_FPU_OPCODE_IP();
15387 IEM_MC_ELSE()
15388 IEM_MC_FPU_STACK_UNDERFLOW(0);
15389 IEM_MC_ENDIF();
15390 IEM_MC_ADVANCE_RIP();
15391
15392 IEM_MC_END();
15393 return VINF_SUCCESS;
15394}
15395
15396
15397/**
15398 * Common worker for FPU instructions working on ST0 and STn, only affecting
15399 * flags, and popping twice when done.
15400 *
15401 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15402 */
15403FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
15404{
15405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15406
15407 IEM_MC_BEGIN(3, 1);
15408 IEM_MC_LOCAL(uint16_t, u16Fsw);
15409 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15410 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15411 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
15412
15413 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15414 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15415
15416 IEM_MC_PREPARE_FPU_USAGE();
15417 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
15418 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
15419 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
15420 IEM_MC_ELSE()
15421 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
15422 IEM_MC_ENDIF();
15423 IEM_MC_ADVANCE_RIP();
15424
15425 IEM_MC_END();
15426 return VINF_SUCCESS;
15427}
15428
15429
15430/** Opcode 0xda 0xe9. */
15431FNIEMOP_DEF(iemOp_fucompp)
15432{
15433 IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
15434 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
15435}
15436
15437
15438/**
15439 * Common worker for FPU instructions working on ST0 and an m32i, and storing
15440 * the result in ST0.
15441 *
15442 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15443 */
15444FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
15445{
15446 IEM_MC_BEGIN(3, 3);
15447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15448 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15449 IEM_MC_LOCAL(int32_t, i32Val2);
15450 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15451 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15452 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
15453
15454 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15456
15457 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15458 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15459 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15460
15461 IEM_MC_PREPARE_FPU_USAGE();
15462 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15463 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
15464 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
15465 IEM_MC_ELSE()
15466 IEM_MC_FPU_STACK_UNDERFLOW(0);
15467 IEM_MC_ENDIF();
15468 IEM_MC_ADVANCE_RIP();
15469
15470 IEM_MC_END();
15471 return VINF_SUCCESS;
15472}
15473
15474
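/*
 * Semantics sketch for the m32i arithmetic forms below (hypothetical
 * helper, plain C arithmetic standing in for the assembly worker): the
 * 32-bit signed integer is widened to the 80-bit format and the operation
 * is performed at full precision, the result replacing ST0.
 *
 * @code
 * static long double iemExampleFiAddSemantics(long double lrdSt0, int32_t i32Val)
 * {
 *     return lrdSt0 + (long double)i32Val;   // what FIADD m32int computes, in effect
 * }
 * @endcode
 */

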
15475/** Opcode 0xda !11/0. */
15476FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
15477{
15478 IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
15479 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
15480}
15481
15482
15483/** Opcode 0xda !11/1. */
15484FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
15485{
15486 IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
15487 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
15488}
15489
15490
15491/** Opcode 0xda !11/2. */
15492FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
15493{
15494 IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");
15495
15496 IEM_MC_BEGIN(3, 3);
15497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15498 IEM_MC_LOCAL(uint16_t, u16Fsw);
15499 IEM_MC_LOCAL(int32_t, i32Val2);
15500 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15501 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15502 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
15503
15504 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15506
15507 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15508 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15509 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15510
15511 IEM_MC_PREPARE_FPU_USAGE();
15512 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15513 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
15514 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15515 IEM_MC_ELSE()
15516 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15517 IEM_MC_ENDIF();
15518 IEM_MC_ADVANCE_RIP();
15519
15520 IEM_MC_END();
15521 return VINF_SUCCESS;
15522}
15523
15524
15525/** Opcode 0xda !11/3. */
15526FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
15527{
15528 IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");
15529
15530 IEM_MC_BEGIN(3, 3);
15531 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15532 IEM_MC_LOCAL(uint16_t, u16Fsw);
15533 IEM_MC_LOCAL(int32_t, i32Val2);
15534 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15535 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15536 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
15537
15538 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15540
15541 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15542 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15543 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15544
15545 IEM_MC_PREPARE_FPU_USAGE();
15546 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15547 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
15548 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15549 IEM_MC_ELSE()
15550 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15551 IEM_MC_ENDIF();
15552 IEM_MC_ADVANCE_RIP();
15553
15554 IEM_MC_END();
15555 return VINF_SUCCESS;
15556}
15557
15558
15559/** Opcode 0xda !11/4. */
15560FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
15561{
15562 IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
15563 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
15564}
15565
15566
15567/** Opcode 0xda !11/5. */
15568FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
15569{
15570 IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
15571 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
15572}
15573
15574
15575/** Opcode 0xda !11/6. */
15576FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
15577{
15578 IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
15579 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
15580}
15581
15582
15583/** Opcode 0xda !11/7. */
15584FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
15585{
15586 IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
15587 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
15588}
15589
15590
15591/** Opcode 0xda. */
15592FNIEMOP_DEF(iemOp_EscF2)
15593{
15594 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15595 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
15596 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15597 {
15598 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15599 {
15600 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
15601 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
15602 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
15603 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
15604 case 4: return IEMOP_RAISE_INVALID_OPCODE();
15605 case 5:
15606 if (bRm == 0xe9)
15607 return FNIEMOP_CALL(iemOp_fucompp);
15608 return IEMOP_RAISE_INVALID_OPCODE();
15609 case 6: return IEMOP_RAISE_INVALID_OPCODE();
15610 case 7: return IEMOP_RAISE_INVALID_OPCODE();
15611 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15612 }
15613 }
15614 else
15615 {
15616 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15617 {
15618 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
15619 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
15620 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
15621 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
15622 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
15623 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
15624 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
15625 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
15626 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15627 }
15628 }
15629}
15630
15631
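/*
 * Reference for the FCMOVcc handlers above and below (architectural, not
 * taken from this file): the predicates test the integer EFLAGS, not the
 * FPU condition codes:
 *      FCMOVB    CF=1            FCMOVNB    CF=0
 *      FCMOVE    ZF=1            FCMOVNE    ZF=0
 *      FCMOVBE   CF=1 || ZF=1    FCMOVNBE   CF=0 && ZF=0
 *      FCMOVU    PF=1            FCMOVNU    PF=0
 */

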
15632/** Opcode 0xdb !11/0. */
15633FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
15634{
15635 IEMOP_MNEMONIC(fild_m32i, "fild m32i");
15636
15637 IEM_MC_BEGIN(2, 3);
15638 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15639 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15640 IEM_MC_LOCAL(int32_t, i32Val);
15641 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15642 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);
15643
15644 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15646
15647 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15648 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15649 IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15650
15651 IEM_MC_PREPARE_FPU_USAGE();
15652 IEM_MC_IF_FPUREG_IS_EMPTY(7)
15653 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
15654 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15655 IEM_MC_ELSE()
15656 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15657 IEM_MC_ENDIF();
15658 IEM_MC_ADVANCE_RIP();
15659
15660 IEM_MC_END();
15661 return VINF_SUCCESS;
15662}
15663
15664
15665/** Opcode 0xdb !11/1. */
15666FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
15667{
15668 IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
15669 IEM_MC_BEGIN(3, 2);
15670 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15671 IEM_MC_LOCAL(uint16_t, u16Fsw);
15672 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15673 IEM_MC_ARG(int32_t *, pi32Dst, 1);
15674 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15675
15676 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15678 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15679 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15680
15681 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15682 IEM_MC_PREPARE_FPU_USAGE();
15683 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15684 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
15685 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
15686 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15687 IEM_MC_ELSE()
15688 IEM_MC_IF_FCW_IM()
15689 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
15690 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
15691 IEM_MC_ENDIF();
15692 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15693 IEM_MC_ENDIF();
15694 IEM_MC_ADVANCE_RIP();
15695
15696 IEM_MC_END();
15697 return VINF_SUCCESS;
15698}
15699
15700
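/*
 * Note on the handler above (architectural): FISTTP (SSE3) always converts
 * with truncation toward zero, ignoring FCW.RC, whereas FIST/FISTP honour
 * the rounding control.  A sketch of the difference in plain C:
 *
 * @code
 * long double lrdVal = 2.75L;
 * int32_t     iTrunc = (int32_t)lrdVal;          // FISTTP style: 2 (chop)
 * int32_t     iRound = (int32_t)lrintl(lrdVal);  // FIST style, default round-to-nearest: 3
 * @endcode
 */

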
15701/** Opcode 0xdb !11/2. */
15702FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
15703{
15704 IEMOP_MNEMONIC(fist_m32i, "fist m32i");
15705 IEM_MC_BEGIN(3, 2);
15706 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15707 IEM_MC_LOCAL(uint16_t, u16Fsw);
15708 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15709 IEM_MC_ARG(int32_t *, pi32Dst, 1);
15710 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15711
15712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15714 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15715 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15716
15717 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15718 IEM_MC_PREPARE_FPU_USAGE();
15719 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15720 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
15721 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
15722 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15723 IEM_MC_ELSE()
15724 IEM_MC_IF_FCW_IM()
15725 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
15726 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
15727 IEM_MC_ENDIF();
15728 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15729 IEM_MC_ENDIF();
15730 IEM_MC_ADVANCE_RIP();
15731
15732 IEM_MC_END();
15733 return VINF_SUCCESS;
15734}
15735
15736
15737/** Opcode 0xdb !11/3. */
15738FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
15739{
15740 IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
15741 IEM_MC_BEGIN(3, 2);
15742 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15743 IEM_MC_LOCAL(uint16_t, u16Fsw);
15744 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15745 IEM_MC_ARG(int32_t *, pi32Dst, 1);
15746 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15747
15748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15750 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15751 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15752
15753 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15754 IEM_MC_PREPARE_FPU_USAGE();
15755 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15756 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
15757 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
15758 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15759 IEM_MC_ELSE()
15760 IEM_MC_IF_FCW_IM()
15761 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
15762 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
15763 IEM_MC_ENDIF();
15764 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15765 IEM_MC_ENDIF();
15766 IEM_MC_ADVANCE_RIP();
15767
15768 IEM_MC_END();
15769 return VINF_SUCCESS;
15770}
15771
15772
15773/** Opcode 0xdb !11/5. */
15774FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
15775{
15776 IEMOP_MNEMONIC(fld_m80r, "fld m80r");
15777
15778 IEM_MC_BEGIN(2, 3);
15779 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15780 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15781 IEM_MC_LOCAL(RTFLOAT80U, r80Val);
15782 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15783 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);
15784
15785 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15787
15788 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15789 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15790 IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15791
15792 IEM_MC_PREPARE_FPU_USAGE();
15793 IEM_MC_IF_FPUREG_IS_EMPTY(7)
15794 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
15795 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15796 IEM_MC_ELSE()
15797 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15798 IEM_MC_ENDIF();
15799 IEM_MC_ADVANCE_RIP();
15800
15801 IEM_MC_END();
15802 return VINF_SUCCESS;
15803}
15804
15805
15806/** Opcode 0xdb !11/7. */
15807FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
15808{
15809 IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
15810 IEM_MC_BEGIN(3, 2);
15811 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15812 IEM_MC_LOCAL(uint16_t, u16Fsw);
15813 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15814 IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
15815 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15816
15817 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15819 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15820 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15821
15822 IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15823 IEM_MC_PREPARE_FPU_USAGE();
15824 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15825 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
15826 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
15827 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15828 IEM_MC_ELSE()
15829 IEM_MC_IF_FCW_IM()
15830 IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
15831 IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
15832 IEM_MC_ENDIF();
15833 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15834 IEM_MC_ENDIF();
15835 IEM_MC_ADVANCE_RIP();
15836
15837 IEM_MC_END();
15838 return VINF_SUCCESS;
15839}
15840
15841
15842/** Opcode 0xdb 11/0. */
15843FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
15844{
15845 IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
15846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15847
15848 IEM_MC_BEGIN(0, 1);
15849 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15850
15851 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15852 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15853
15854 IEM_MC_PREPARE_FPU_USAGE();
15855 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15856 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
15857 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15858 IEM_MC_ENDIF();
15859 IEM_MC_UPDATE_FPU_OPCODE_IP();
15860 IEM_MC_ELSE()
15861 IEM_MC_FPU_STACK_UNDERFLOW(0);
15862 IEM_MC_ENDIF();
15863 IEM_MC_ADVANCE_RIP();
15864
15865 IEM_MC_END();
15866 return VINF_SUCCESS;
15867}
15868
15869
15870/** Opcode 0xdb 11/1. */
15871FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
15872{
15873 IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
15874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15875
15876 IEM_MC_BEGIN(0, 1);
15877 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15878
15879 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15880 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15881
15882 IEM_MC_PREPARE_FPU_USAGE();
15883 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15884 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
15885 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15886 IEM_MC_ENDIF();
15887 IEM_MC_UPDATE_FPU_OPCODE_IP();
15888 IEM_MC_ELSE()
15889 IEM_MC_FPU_STACK_UNDERFLOW(0);
15890 IEM_MC_ENDIF();
15891 IEM_MC_ADVANCE_RIP();
15892
15893 IEM_MC_END();
15894 return VINF_SUCCESS;
15895}
15896
15897
15898/** Opcode 0xdb 11/2. */
15899FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
15900{
15901 IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
15902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15903
15904 IEM_MC_BEGIN(0, 1);
15905 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15906
15907 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15908 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15909
15910 IEM_MC_PREPARE_FPU_USAGE();
15911 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15912 IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
15913 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15914 IEM_MC_ENDIF();
15915 IEM_MC_UPDATE_FPU_OPCODE_IP();
15916 IEM_MC_ELSE()
15917 IEM_MC_FPU_STACK_UNDERFLOW(0);
15918 IEM_MC_ENDIF();
15919 IEM_MC_ADVANCE_RIP();
15920
15921 IEM_MC_END();
15922 return VINF_SUCCESS;
15923}
15924
15925
15926/** Opcode 0xdb 11/3. */
15927FNIEMOP_DEF_1(iemOp_fcmovnu_stN, uint8_t, bRm)
15928{
15929 IEMOP_MNEMONIC(fcmovnu_st0_stN, "fcmovnu st0,stN");
15930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15931
15932 IEM_MC_BEGIN(0, 1);
15933 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15934
15935 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15936 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15937
15938 IEM_MC_PREPARE_FPU_USAGE();
15939 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15940 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
15941 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15942 IEM_MC_ENDIF();
15943 IEM_MC_UPDATE_FPU_OPCODE_IP();
15944 IEM_MC_ELSE()
15945 IEM_MC_FPU_STACK_UNDERFLOW(0);
15946 IEM_MC_ENDIF();
15947 IEM_MC_ADVANCE_RIP();
15948
15949 IEM_MC_END();
15950 return VINF_SUCCESS;
15951}
15952
15953
15954/** Opcode 0xdb 0xe0. */
15955FNIEMOP_DEF(iemOp_fneni)
15956{
15957 IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
15958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15959 IEM_MC_BEGIN(0,0);
15960 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15961 IEM_MC_ADVANCE_RIP();
15962 IEM_MC_END();
15963 return VINF_SUCCESS;
15964}
15965
15966
15967/** Opcode 0xdb 0xe1. */
15968FNIEMOP_DEF(iemOp_fndisi)
15969{
15970 IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
15971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15972 IEM_MC_BEGIN(0,0);
15973 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15974 IEM_MC_ADVANCE_RIP();
15975 IEM_MC_END();
15976 return VINF_SUCCESS;
15977}
15978
15979
15980/** Opcode 0xdb 0xe2. */
15981FNIEMOP_DEF(iemOp_fnclex)
15982{
15983 IEMOP_MNEMONIC(fnclex, "fnclex");
15984 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15985
15986 IEM_MC_BEGIN(0,0);
15987 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15988 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15989 IEM_MC_CLEAR_FSW_EX();
15990 IEM_MC_ADVANCE_RIP();
15991 IEM_MC_END();
15992 return VINF_SUCCESS;
15993}
15994
15995
15996/** Opcode 0xdb 0xe3. */
15997FNIEMOP_DEF(iemOp_fninit)
15998{
15999 IEMOP_MNEMONIC(fninit, "fninit");
16000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16001 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
16002}
16003
16004
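/*
 * For reference (architectural reset state established by FNINIT/FINIT):
 * FCW=0x037F (all exceptions masked, 64-bit precision, round to nearest),
 * FSW=0x0000 (so TOP=0), FTW=0xFFFF (all registers empty), and the FPU
 * instruction/data pointers and last opcode are cleared.
 */

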
16005/** Opcode 0xdb 0xe4. */
16006FNIEMOP_DEF(iemOp_fnsetpm)
16007{
16008 IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
16009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16010 IEM_MC_BEGIN(0,0);
16011 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16012 IEM_MC_ADVANCE_RIP();
16013 IEM_MC_END();
16014 return VINF_SUCCESS;
16015}
16016
16017
16018/** Opcode 0xdb 0xe5. */
16019FNIEMOP_DEF(iemOp_frstpm)
16020{
16021 IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
16022#if 0 /* #UDs on newer CPUs */
16023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16024 IEM_MC_BEGIN(0,0);
16025 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16026 IEM_MC_ADVANCE_RIP();
16027 IEM_MC_END();
16028 return VINF_SUCCESS;
16029#else
16030 return IEMOP_RAISE_INVALID_OPCODE();
16031#endif
16032}
16033
16034
16035/** Opcode 0xdb 11/5. */
16036FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
16037{
16038 IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
16039 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
16040}
16041
16042
16043/** Opcode 0xdb 11/6. */
16044FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
16045{
16046 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
16047 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
16048}
16049
16050
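/*
 * Reference for the FCOMI/FUCOMI handlers above (architectural): the
 * comparison result goes straight into EFLAGS instead of the FSW
 * condition codes:
 *      ST0 > ST(i)   ->  ZF=0, PF=0, CF=0
 *      ST0 < ST(i)   ->  ZF=0, PF=0, CF=1
 *      ST0 = ST(i)   ->  ZF=1, PF=0, CF=0
 *      unordered     ->  ZF=1, PF=1, CF=1
 * FCOMI raises #IA on QNaN operands while FUCOMI only does so for SNaNs.
 */

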
16051/** Opcode 0xdb. */
16052FNIEMOP_DEF(iemOp_EscF3)
16053{
16054 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16055 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
16056 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16057 {
16058 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16059 {
16060 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
16061 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
16062 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
16063 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnu_stN, bRm);
16064 case 4:
16065 switch (bRm)
16066 {
16067 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
16068 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
16069 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
16070 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
16071 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
16072 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
16073 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
16074 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
16075 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16076 }
16077 break;
16078 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
16079 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
16080 case 7: return IEMOP_RAISE_INVALID_OPCODE();
16081 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16082 }
16083 }
16084 else
16085 {
16086 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16087 {
16088 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
16089 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i, bRm);
16090 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
16091 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
16092 case 4: return IEMOP_RAISE_INVALID_OPCODE();
16093 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
16094 case 6: return IEMOP_RAISE_INVALID_OPCODE();
16095 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
16096 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16097 }
16098 }
16099}
16100
16101
16102/**
16103 * Common worker for FPU instructions working on STn and ST0, and storing the
16104 * result in STn unless IE, DE or ZE was raised.
16105 *
16106 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16107 */
16108FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
16109{
16110 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16111
16112 IEM_MC_BEGIN(3, 1);
16113 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16114 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16115 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16116 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
16117
16118 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16119 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16120
16121 IEM_MC_PREPARE_FPU_USAGE();
16122 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
16123 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
16124 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
16125 IEM_MC_ELSE()
16126 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
16127 IEM_MC_ENDIF();
16128 IEM_MC_ADVANCE_RIP();
16129
16130 IEM_MC_END();
16131 return VINF_SUCCESS;
16132}
16133
16134
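/*
 * Note on the 0xdc register forms below: /4 is FSUBR and /5 is FSUB
 * (likewise /6 FDIVR and /7 FDIV), the reverse of the 0xd8 assignments.
 * Both /4 encodings still compute ST0 - ST(i); only the destination
 * register differs, which is why the mnemonics swap.
 */

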
16135/** Opcode 0xdc 11/0. */
16136FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
16137{
16138 IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
16139 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
16140}
16141
16142
16143/** Opcode 0xdc 11/1. */
16144FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
16145{
16146 IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
16147 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
16148}
16149
16150
16151/** Opcode 0xdc 11/4. */
16152FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
16153{
16154 IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
16155 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
16156}
16157
16158
16159/** Opcode 0xdc 11/5. */
16160FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
16161{
16162 IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
16163 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
16164}
16165
16166
16167/** Opcode 0xdc 11/6. */
16168FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
16169{
16170 IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
16171 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
16172}
16173
16174
16175/** Opcode 0xdc 11/7. */
16176FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
16177{
16178 IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
16179 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
16180}
16181
16182
16183/**
16184 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
16185 * memory operand, and storing the result in ST0.
16186 *
16187 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16188 */
16189FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnAImpl)
16190{
16191 IEM_MC_BEGIN(3, 3);
16192 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16193 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16194 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
16195 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16196 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
16197 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
16198
16199 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16201 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16202 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16203
16204 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16205 IEM_MC_PREPARE_FPU_USAGE();
16206 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
16207 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Factor1, pr64Factor2);
16208 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16209 IEM_MC_ELSE()
16210 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16211 IEM_MC_ENDIF();
16212 IEM_MC_ADVANCE_RIP();
16213
16214 IEM_MC_END();
16215 return VINF_SUCCESS;
16216}
16217
16218
16219/** Opcode 0xdc !11/0. */
16220FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
16221{
16222 IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
16223 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
16224}
16225
16226
16227/** Opcode 0xdc !11/1. */
16228FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
16229{
16230 IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
16231 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
16232}
16233
16234
16235/** Opcode 0xdc !11/2. */
16236FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
16237{
16238 IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");
16239
16240 IEM_MC_BEGIN(3, 3);
16241 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16242 IEM_MC_LOCAL(uint16_t, u16Fsw);
16243 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
16244 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16245 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16246 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
16247
16248 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16249 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16250
16251 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16252 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16253 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16254
16255 IEM_MC_PREPARE_FPU_USAGE();
16256 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16257 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
16258 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16259 IEM_MC_ELSE()
16260 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16261 IEM_MC_ENDIF();
16262 IEM_MC_ADVANCE_RIP();
16263
16264 IEM_MC_END();
16265 return VINF_SUCCESS;
16266}
16267
16268
16269/** Opcode 0xdc !11/3. */
16270FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
16271{
16272 IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");
16273
16274 IEM_MC_BEGIN(3, 3);
16275 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16276 IEM_MC_LOCAL(uint16_t, u16Fsw);
16277 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
16278 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16279 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16280 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
16281
16282 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16284
16285 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16286 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16287 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16288
16289 IEM_MC_PREPARE_FPU_USAGE();
16290 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16291 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
16292 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16293 IEM_MC_ELSE()
16294 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16295 IEM_MC_ENDIF();
16296 IEM_MC_ADVANCE_RIP();
16297
16298 IEM_MC_END();
16299 return VINF_SUCCESS;
16300}
16301
16302
16303/** Opcode 0xdc !11/4. */
16304FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
16305{
16306 IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
16307 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
16308}
16309
16310
16311/** Opcode 0xdc !11/5. */
16312FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
16313{
16314 IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
16315 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
16316}
16317
16318
16319/** Opcode 0xdc !11/6. */
16320FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
16321{
16322 IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
16323 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
16324}
16325
16326
16327/** Opcode 0xdc !11/7. */
16328FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
16329{
16330 IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
16331 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
16332}
16333
16334
16335/** Opcode 0xdc. */
16336FNIEMOP_DEF(iemOp_EscF4)
16337{
16338 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16339 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
16340 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16341 {
16342 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16343 {
16344 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
16345 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
16346 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, Intel behavior is that of FCOM ST(i). */
16347 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, Intel behavior is that of FCOMP ST(i). */
16348 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
16349 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
16350 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
16351 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
16352 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16353 }
16354 }
16355 else
16356 {
16357 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16358 {
16359 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
16360 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
16361 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
16362 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
16363 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
16364 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
16365 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
16366 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
16367 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16368 }
16369 }
16370}
16371
16372
16373/** Opcode 0xdd !11/0.
16374 * @sa iemOp_fld_m32r */
16375FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
16376{
16377 IEMOP_MNEMONIC(fld_m64r, "fld m64r");
16378
16379 IEM_MC_BEGIN(2, 3);
16380 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16381 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16382 IEM_MC_LOCAL(RTFLOAT64U, r64Val);
16383 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16384 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);
16385
16386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16388 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16389 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16390
16391 IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16392 IEM_MC_PREPARE_FPU_USAGE();
16393 IEM_MC_IF_FPUREG_IS_EMPTY(7)
16394 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
16395 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16396 IEM_MC_ELSE()
16397 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16398 IEM_MC_ENDIF();
16399 IEM_MC_ADVANCE_RIP();
16400
16401 IEM_MC_END();
16402 return VINF_SUCCESS;
16403}
16404
16405
16406/** Opcode 0xdd !11/1. */
16407FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
16408{
16409 IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
16410 IEM_MC_BEGIN(3, 2);
16411 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16412 IEM_MC_LOCAL(uint16_t, u16Fsw);
16413 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16414 IEM_MC_ARG(int64_t *, pi64Dst, 1);
16415 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16416
16417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16419 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16420 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16421
16422 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16423 IEM_MC_PREPARE_FPU_USAGE();
16424 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16425 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
16426 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
16427 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16428 IEM_MC_ELSE()
16429 IEM_MC_IF_FCW_IM()
16430 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
16431 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
16432 IEM_MC_ENDIF();
16433 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16434 IEM_MC_ENDIF();
16435 IEM_MC_ADVANCE_RIP();
16436
16437 IEM_MC_END();
16438 return VINF_SUCCESS;
16439}
16440
16441
16442/** Opcode 0xdd !11/2. */
16443FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
16444{
16445 IEMOP_MNEMONIC(fst_m64r, "fst m64r");
16446 IEM_MC_BEGIN(3, 2);
16447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16448 IEM_MC_LOCAL(uint16_t, u16Fsw);
16449 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16450 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
16451 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16452
16453 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16455 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16456 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16457
16458 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16459 IEM_MC_PREPARE_FPU_USAGE();
16460 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16461 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
16462 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
16463 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16464 IEM_MC_ELSE()
16465 IEM_MC_IF_FCW_IM()
16466 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
16467 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
16468 IEM_MC_ENDIF();
16469 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16470 IEM_MC_ENDIF();
16471 IEM_MC_ADVANCE_RIP();
16472
16473 IEM_MC_END();
16474 return VINF_SUCCESS;
16475}
16476
16477
16478
16479
16480/** Opcode 0xdd !11/3. */
16481FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
16482{
16483 IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
16484 IEM_MC_BEGIN(3, 2);
16485 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16486 IEM_MC_LOCAL(uint16_t, u16Fsw);
16487 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16488 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
16489 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16490
16491 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16493 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16494 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16495
16496 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16497 IEM_MC_PREPARE_FPU_USAGE();
16498 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16499 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
16500 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
16501 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16502 IEM_MC_ELSE()
16503 IEM_MC_IF_FCW_IM()
16504 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
16505 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
16506 IEM_MC_ENDIF();
16507 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16508 IEM_MC_ENDIF();
16509 IEM_MC_ADVANCE_RIP();
16510
16511 IEM_MC_END();
16512 return VINF_SUCCESS;
16513}
16514
16515
16516/** Opcode 0xdd !11/4. */
16517FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
16518{
16519 IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
16520 IEM_MC_BEGIN(3, 0);
16521 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
16522 IEM_MC_ARG(uint8_t, iEffSeg, 1);
16523 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
16524 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16526 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16527 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
16528 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
16529 IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
16530 IEM_MC_END();
16531 return VINF_SUCCESS;
16532}
16533
16534
16535/** Opcode 0xdd !11/6. */
16536FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
16537{
16538 IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
16539 IEM_MC_BEGIN(3, 0);
16540 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
16541 IEM_MC_ARG(uint8_t, iEffSeg, 1);
16542 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
16543 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16545 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16546 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
16547 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
16548 IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
16549 IEM_MC_END();
16550 return VINF_SUCCESS;
16551}
16552
16553
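/*
 * Layout note for FRSTOR/FNSAVE above (architectural): the m94/108byte
 * operand is the FNSTENV/FLDENV environment block (14 bytes with a 16-bit
 * operand size, 28 bytes with a 32-bit one) followed by the eight stack
 * registers ST(0)..ST(7) stored as packed 80-bit values (80 bytes), for a
 * total of 94 or 108 bytes.  FNSAVE additionally reinitializes the FPU as
 * if by FNINIT once the image has been stored.
 */

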
16554/** Opcode 0xdd !11/7. */
16555FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
16556{
16557 IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");
16558
16559 IEM_MC_BEGIN(0, 2);
16560 IEM_MC_LOCAL(uint16_t, u16Tmp);
16561 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16562
16563 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16565 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16566
16567 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
16568 IEM_MC_FETCH_FSW(u16Tmp);
16569 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
16570 IEM_MC_ADVANCE_RIP();
16571
16572/** @todo Debug / drop a hint to the verifier that things may differ
16573 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
16574 * NT4SP1. (X86_FSW_PE) */
16575 IEM_MC_END();
16576 return VINF_SUCCESS;
16577}
16578
16579
16580/** Opcode 0xdd 11/0. */
16581FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
16582{
16583 IEMOP_MNEMONIC(ffree_stN, "ffree stN");
16584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16585 /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
16586 unmodified. */
16587
16588 IEM_MC_BEGIN(0, 0);
16589
16590 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16591 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16592
16593 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
16594 IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
16595 IEM_MC_UPDATE_FPU_OPCODE_IP();
16596
16597 IEM_MC_ADVANCE_RIP();
16598 IEM_MC_END();
16599 return VINF_SUCCESS;
16600}
16601
16602
16603/** Opcode 0xdd 11/1. */
16604FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
16605{
16606 IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
16607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16608
16609 IEM_MC_BEGIN(0, 2);
16610 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
16611 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16612 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16613 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16614
16615 IEM_MC_PREPARE_FPU_USAGE();
16616 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16617 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
16618 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
16619 IEM_MC_ELSE()
16620 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
16621 IEM_MC_ENDIF();
16622
16623 IEM_MC_ADVANCE_RIP();
16624 IEM_MC_END();
16625 return VINF_SUCCESS;
16626}
16627
16628
16629/** Opcode 0xdd 11/3. */
16630FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
16631{
16632 IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
16633 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
16634}
16635
16636
16637/** Opcode 0xdd 11/4. */
16638FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
16639{
16640 IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
16641 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
16642}
16643
16644
16645/** Opcode 0xdd. */
16646FNIEMOP_DEF(iemOp_EscF5)
16647{
16648 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16649 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
16650 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16651 {
16652 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16653 {
16654 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
16655 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, Intel behavior is that of FXCH ST(i). */
16656 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
16657 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
16658 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0, bRm);
16659 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
16660 case 6: return IEMOP_RAISE_INVALID_OPCODE();
16661 case 7: return IEMOP_RAISE_INVALID_OPCODE();
16662 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16663 }
16664 }
16665 else
16666 {
16667 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16668 {
16669 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
16670 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
16671 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
16672 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
16673 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
16674 case 5: return IEMOP_RAISE_INVALID_OPCODE();
16675 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
16676 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
16677 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16678 }
16679 }
16680}
16681
16682
16683/** Opcode 0xde 11/0. */
16684FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
16685{
16686 IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
16687 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
16688}
16689
16690
16691/** Opcode 0xde 11/1. */
16692FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
16693{
16694 IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
16695 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
16696}
16697
16698
16699/** Opcode 0xde 0xd9. */
16700FNIEMOP_DEF(iemOp_fcompp)
16701{
16702 IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
16703 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
16704}
16705
16706
16707/** Opcode 0xde 11/4. */
16708FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
16709{
16710 IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
16711 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
16712}
16713
16714
16715/** Opcode 0xde 11/5. */
16716FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
16717{
16718 IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
16719 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
16720}
16721
16722
16723/** Opcode 0xde 11/6. */
16724FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
16725{
16726 IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
16727 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
16728}
16729
16730
16731/** Opcode 0xde 11/7. */
16732FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
16733{
16734 IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
16735 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
16736}
16737
16738
16739/**
16740 * Common worker for FPU instructions working on ST0 and an m16i, and storing
16741 * the result in ST0.
16742 *
16743 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16744 */
16745FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
16746{
16747 IEM_MC_BEGIN(3, 3);
16748 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16749 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16750 IEM_MC_LOCAL(int16_t, i16Val2);
16751 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16752 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16753 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16754
16755 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16757
16758 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16759 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16760 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16761
16762 IEM_MC_PREPARE_FPU_USAGE();
16763 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16764 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
16765 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
16766 IEM_MC_ELSE()
16767 IEM_MC_FPU_STACK_UNDERFLOW(0);
16768 IEM_MC_ENDIF();
16769 IEM_MC_ADVANCE_RIP();
16770
16771 IEM_MC_END();
16772 return VINF_SUCCESS;
16773}
16774
16775
16776/** Opcode 0xde !11/0. */
16777FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
16778{
16779 IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
16780 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
16781}
16782
16783
16784/** Opcode 0xde !11/1. */
16785FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
16786{
16787 IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
16788 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
16789}
16790
16791
16792/** Opcode 0xde !11/2. */
16793FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
16794{
16795 IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");
16796
16797 IEM_MC_BEGIN(3, 3);
16798 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16799 IEM_MC_LOCAL(uint16_t, u16Fsw);
16800 IEM_MC_LOCAL(int16_t, i16Val2);
16801 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16802 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16803 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16804
16805 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16807
16808 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16809 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16810 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16811
16812 IEM_MC_PREPARE_FPU_USAGE();
16813 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16814 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
16815 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16816 IEM_MC_ELSE()
16817 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16818 IEM_MC_ENDIF();
16819 IEM_MC_ADVANCE_RIP();
16820
16821 IEM_MC_END();
16822 return VINF_SUCCESS;
16823}
16824
16825
16826/** Opcode 0xde !11/3. */
16827FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
16828{
16829 IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");
16830
16831 IEM_MC_BEGIN(3, 3);
16832 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16833 IEM_MC_LOCAL(uint16_t, u16Fsw);
16834 IEM_MC_LOCAL(int16_t, i16Val2);
16835 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16836 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16837 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16838
16839 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16841
16842 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16843 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16844 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16845
16846 IEM_MC_PREPARE_FPU_USAGE();
16847 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16848 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
16849 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16850 IEM_MC_ELSE()
16851 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16852 IEM_MC_ENDIF();
16853 IEM_MC_ADVANCE_RIP();
16854
16855 IEM_MC_END();
16856 return VINF_SUCCESS;
16857}
16858
16859
16860/** Opcode 0xde !11/4. */
16861FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
16862{
16863 IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
16864 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
16865}
16866
16867
16868/** Opcode 0xde !11/5. */
16869FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
16870{
16871 IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
16872 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
16873}
16874
16875
16876/** Opcode 0xde !11/6. */
16877FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
16878{
16879 IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
16880 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
16881}
16882
16883
16884/** Opcode 0xde !11/7. */
16885FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
16886{
16887 IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
16888 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
16889}
16890
16891
16892/** Opcode 0xde. */
16893FNIEMOP_DEF(iemOp_EscF6)
16894{
16895 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16896 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
16897 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16898 {
16899 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16900 {
16901 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
16902 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
16903 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
16904 case 3: if (bRm == 0xd9)
16905 return FNIEMOP_CALL(iemOp_fcompp);
16906 return IEMOP_RAISE_INVALID_OPCODE();
16907 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
16908 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
16909 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
16910 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
16911 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16912 }
16913 }
16914 else
16915 {
16916 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16917 {
16918 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
16919 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
16920 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
16921 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
16922 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
16923 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
16924 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
16925 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
16926 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16927 }
16928 }
16929}
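
/*
 * Illustrative sketch (not part of the build): how the ModRM byte steers the
 * escape dispatch above.  Mod == 3 selects the register (ST(i)) forms,
 * anything else the m16i memory forms, and the reg field picks the /0../7
 * row.  The helper name below is hypothetical; it only mirrors the
 * mask/shift arithmetic used by the switch statements.
 */
#if 0 /* example only */
static void iemExampleSplitModRm(uint8_t bRm, unsigned *piMod, unsigned *piReg, unsigned *piRm)
{
    *piMod = bRm >> 6;        /* (bRm & X86_MODRM_MOD_MASK) >> X86_MODRM_MOD_SHIFT */
    *piReg = (bRm >> 3) & 7;  /* (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK */
    *piRm  = bRm & 7;         /* bRm & X86_MODRM_RM_MASK */
}
#endif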
16930
16931
16932/** Opcode 0xdf 11/0.
16933 * Undocumented instruction, assumed to work like ffree + fincstp. */
16934FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
16935{
16936 IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
16937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16938
16939 IEM_MC_BEGIN(0, 0);
16940
16941 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16942 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16943
16944 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
16945 IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
16946 IEM_MC_FPU_STACK_INC_TOP();
16947 IEM_MC_UPDATE_FPU_OPCODE_IP();
16948
16949 IEM_MC_ADVANCE_RIP();
16950 IEM_MC_END();
16951 return VINF_SUCCESS;
16952}
16953
16954
16955/** Opcode 0xdf 0xe0. */
16956FNIEMOP_DEF(iemOp_fnstsw_ax)
16957{
16958 IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
16959 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16960
16961 IEM_MC_BEGIN(0, 1);
16962 IEM_MC_LOCAL(uint16_t, u16Tmp);
16963 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16964 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
16965 IEM_MC_FETCH_FSW(u16Tmp);
16966 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
16967 IEM_MC_ADVANCE_RIP();
16968 IEM_MC_END();
16969 return VINF_SUCCESS;
16970}
16971
16972
16973/** Opcode 0xdf 11/5. */
16974FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
16975{
16976 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
16977    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, true /*fPop*/);
16978}
16979
16980
16981/** Opcode 0xdf 11/6. */
16982FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
16983{
16984 IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
16985 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
16986}
16987
16988
16989/** Opcode 0xdf !11/0. */
16990FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
16991{
16992 IEMOP_MNEMONIC(fild_m16i, "fild m16i");
16993
16994 IEM_MC_BEGIN(2, 3);
16995 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16996 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16997 IEM_MC_LOCAL(int16_t, i16Val);
16998 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16999 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);
17000
17001 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17003
17004 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17005 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17006 IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17007
17008 IEM_MC_PREPARE_FPU_USAGE();
17009 IEM_MC_IF_FPUREG_IS_EMPTY(7)
17010 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
17011 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17012 IEM_MC_ELSE()
17013 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17014 IEM_MC_ENDIF();
17015 IEM_MC_ADVANCE_RIP();
17016
17017 IEM_MC_END();
17018 return VINF_SUCCESS;
17019}
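
/*
 * Note on the IEM_MC_IF_FPUREG_IS_EMPTY(7) check above: an x87 push
 * decrements TOP, so the physical register that receives the new value is
 * the one currently addressable as ST(7).  A minimal sketch of that
 * arithmetic (hypothetical helper):
 */
#if 0 /* example only */
static unsigned iemExampleFpuPushSlot(unsigned iTop) /* iTop = current FSW.TOP */
{
    return (iTop + 7) & 7; /* == (iTop - 1) & 7, the slot that becomes ST(0) */
}
#endif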
17020
17021
17022/** Opcode 0xdf !11/1. */
17023FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
17024{
17025 IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
17026 IEM_MC_BEGIN(3, 2);
17027 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17028 IEM_MC_LOCAL(uint16_t, u16Fsw);
17029 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17030 IEM_MC_ARG(int16_t *, pi16Dst, 1);
17031 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17032
17033 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17035 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17036 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17037
17038 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17039 IEM_MC_PREPARE_FPU_USAGE();
17040 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17041 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
17042 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
17043 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17044 IEM_MC_ELSE()
17045 IEM_MC_IF_FCW_IM()
17046 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
17047 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
17048 IEM_MC_ENDIF();
17049 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17050 IEM_MC_ENDIF();
17051 IEM_MC_ADVANCE_RIP();
17052
17053 IEM_MC_END();
17054 return VINF_SUCCESS;
17055}
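
/*
 * Note on the IEM_MC_IF_FCW_IM() branch above: storing from an empty ST(0)
 * is an invalid operation, and with the IM mask bit set in FCW the CPU
 * still writes the 16-bit "integer indefinite" (0x8000, i.e. INT16_MIN)
 * before the underflow is recorded, which is what the masked path models.
 */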
17056
17057
17058/** Opcode 0xdf !11/2. */
17059FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
17060{
17061 IEMOP_MNEMONIC(fist_m16i, "fist m16i");
17062 IEM_MC_BEGIN(3, 2);
17063 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17064 IEM_MC_LOCAL(uint16_t, u16Fsw);
17065 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17066 IEM_MC_ARG(int16_t *, pi16Dst, 1);
17067 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17068
17069 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17071 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17072 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17073
17074 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17075 IEM_MC_PREPARE_FPU_USAGE();
17076 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17077 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
17078 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
17079 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17080 IEM_MC_ELSE()
17081 IEM_MC_IF_FCW_IM()
17082 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
17083 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
17084 IEM_MC_ENDIF();
17085 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17086 IEM_MC_ENDIF();
17087 IEM_MC_ADVANCE_RIP();
17088
17089 IEM_MC_END();
17090 return VINF_SUCCESS;
17091}
17092
17093
17094/** Opcode 0xdf !11/3. */
17095FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
17096{
17097 IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
17098 IEM_MC_BEGIN(3, 2);
17099 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17100 IEM_MC_LOCAL(uint16_t, u16Fsw);
17101 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17102 IEM_MC_ARG(int16_t *, pi16Dst, 1);
17103 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17104
17105 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17107 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17108 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17109
17110 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17111 IEM_MC_PREPARE_FPU_USAGE();
17112 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17113 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
17114 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
17115 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17116 IEM_MC_ELSE()
17117 IEM_MC_IF_FCW_IM()
17118 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
17119 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
17120 IEM_MC_ENDIF();
17121 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17122 IEM_MC_ENDIF();
17123 IEM_MC_ADVANCE_RIP();
17124
17125 IEM_MC_END();
17126 return VINF_SUCCESS;
17127}
17128
17129
17130/** Opcode 0xdf !11/4. */
17131FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
17132
17133
17134/** Opcode 0xdf !11/5. */
17135FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
17136{
17137 IEMOP_MNEMONIC(fild_m64i, "fild m64i");
17138
17139 IEM_MC_BEGIN(2, 3);
17140 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17141 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
17142 IEM_MC_LOCAL(int64_t, i64Val);
17143 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
17144 IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);
17145
17146 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17148
17149 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17150 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17151 IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17152
17153 IEM_MC_PREPARE_FPU_USAGE();
17154 IEM_MC_IF_FPUREG_IS_EMPTY(7)
17155 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
17156 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17157 IEM_MC_ELSE()
17158 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17159 IEM_MC_ENDIF();
17160 IEM_MC_ADVANCE_RIP();
17161
17162 IEM_MC_END();
17163 return VINF_SUCCESS;
17164}
17165
17166
17167/** Opcode 0xdf !11/6. */
17168FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
17169
17170
17171/** Opcode 0xdf !11/7. */
17172FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
17173{
17174 IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
17175 IEM_MC_BEGIN(3, 2);
17176 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17177 IEM_MC_LOCAL(uint16_t, u16Fsw);
17178 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17179 IEM_MC_ARG(int64_t *, pi64Dst, 1);
17180 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17181
17182 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17184 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17185 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17186
17187 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17188 IEM_MC_PREPARE_FPU_USAGE();
17189 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17190 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
17191 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
17192 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17193 IEM_MC_ELSE()
17194 IEM_MC_IF_FCW_IM()
17195 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
17196 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
17197 IEM_MC_ENDIF();
17198 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17199 IEM_MC_ENDIF();
17200 IEM_MC_ADVANCE_RIP();
17201
17202 IEM_MC_END();
17203 return VINF_SUCCESS;
17204}
17205
17206
17207/** Opcode 0xdf. */
17208FNIEMOP_DEF(iemOp_EscF7)
17209{
17210    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
17211 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17212 {
17213 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17214 {
17215 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
17216 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
17217 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
17218 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
17219 case 4: if (bRm == 0xe0)
17220 return FNIEMOP_CALL(iemOp_fnstsw_ax);
17221 return IEMOP_RAISE_INVALID_OPCODE();
17222 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
17223 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
17224 case 7: return IEMOP_RAISE_INVALID_OPCODE();
17225 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17226 }
17227 }
17228 else
17229 {
17230 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17231 {
17232 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
17233 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
17234 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
17235 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
17236 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
17237 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
17238 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
17239 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
17240 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17241 }
17242 }
17243}
17244
17245
17246/** Opcode 0xe0. */
17247FNIEMOP_DEF(iemOp_loopne_Jb)
17248{
17249 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
17250 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17252 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17253
17254 switch (pVCpu->iem.s.enmEffAddrMode)
17255 {
17256 case IEMMODE_16BIT:
17257 IEM_MC_BEGIN(0,0);
17258 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
17259 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
17260 IEM_MC_REL_JMP_S8(i8Imm);
17261 } IEM_MC_ELSE() {
17262 IEM_MC_ADVANCE_RIP();
17263 } IEM_MC_ENDIF();
17264 IEM_MC_END();
17265 return VINF_SUCCESS;
17266
17267 case IEMMODE_32BIT:
17268 IEM_MC_BEGIN(0,0);
17269 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
17270 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
17271 IEM_MC_REL_JMP_S8(i8Imm);
17272 } IEM_MC_ELSE() {
17273 IEM_MC_ADVANCE_RIP();
17274 } IEM_MC_ENDIF();
17275 IEM_MC_END();
17276 return VINF_SUCCESS;
17277
17278 case IEMMODE_64BIT:
17279 IEM_MC_BEGIN(0,0);
17280 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
17281 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
17282 IEM_MC_REL_JMP_S8(i8Imm);
17283 } IEM_MC_ELSE() {
17284 IEM_MC_ADVANCE_RIP();
17285 } IEM_MC_ENDIF();
17286 IEM_MC_END();
17287 return VINF_SUCCESS;
17288
17289 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17290 }
17291}
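
/*
 * A minimal C model of the LOOPNE condition handled above (hypothetical
 * helper, 16-bit address size shown): the counter is decremented first,
 * without touching the flags, and the branch is taken while the counter is
 * non-zero AND ZF is clear.  LOOPE (0xe1) only flips the ZF test; LOOP
 * (0xe2) drops it entirely.
 */
#if 0 /* example only */
static uint16_t iemExampleLoopneStep(uint16_t *pCx, uint16_t uIpNext, int8_t i8Disp, bool fZf)
{
    *pCx -= 1;
    return (*pCx != 0 && !fZf) ? (uint16_t)(uIpNext + i8Disp) : uIpNext;
}
#endif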
17292
17293
17294/** Opcode 0xe1. */
17295FNIEMOP_DEF(iemOp_loope_Jb)
17296{
17297 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
17298 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17300 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17301
17302 switch (pVCpu->iem.s.enmEffAddrMode)
17303 {
17304 case IEMMODE_16BIT:
17305 IEM_MC_BEGIN(0,0);
17306 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
17307 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
17308 IEM_MC_REL_JMP_S8(i8Imm);
17309 } IEM_MC_ELSE() {
17310 IEM_MC_ADVANCE_RIP();
17311 } IEM_MC_ENDIF();
17312 IEM_MC_END();
17313 return VINF_SUCCESS;
17314
17315 case IEMMODE_32BIT:
17316 IEM_MC_BEGIN(0,0);
17317 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
17318 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
17319 IEM_MC_REL_JMP_S8(i8Imm);
17320 } IEM_MC_ELSE() {
17321 IEM_MC_ADVANCE_RIP();
17322 } IEM_MC_ENDIF();
17323 IEM_MC_END();
17324 return VINF_SUCCESS;
17325
17326 case IEMMODE_64BIT:
17327 IEM_MC_BEGIN(0,0);
17328 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
17329 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
17330 IEM_MC_REL_JMP_S8(i8Imm);
17331 } IEM_MC_ELSE() {
17332 IEM_MC_ADVANCE_RIP();
17333 } IEM_MC_ENDIF();
17334 IEM_MC_END();
17335 return VINF_SUCCESS;
17336
17337 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17338 }
17339}
17340
17341
17342/** Opcode 0xe2. */
17343FNIEMOP_DEF(iemOp_loop_Jb)
17344{
17345 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
17346 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17348 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17349
17350 /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
17351 * using the 32-bit operand size override. How can that be restarted? See
17352 * weird pseudo code in intel manual. */
17353 switch (pVCpu->iem.s.enmEffAddrMode)
17354 {
17355 case IEMMODE_16BIT:
17356 IEM_MC_BEGIN(0,0);
17357 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
17358 {
17359 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
17360 IEM_MC_IF_CX_IS_NZ() {
17361 IEM_MC_REL_JMP_S8(i8Imm);
17362 } IEM_MC_ELSE() {
17363 IEM_MC_ADVANCE_RIP();
17364 } IEM_MC_ENDIF();
17365 }
17366 else
17367 {
17368 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
17369 IEM_MC_ADVANCE_RIP();
17370 }
17371 IEM_MC_END();
17372 return VINF_SUCCESS;
17373
17374 case IEMMODE_32BIT:
17375 IEM_MC_BEGIN(0,0);
17376 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
17377 {
17378 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
17379 IEM_MC_IF_ECX_IS_NZ() {
17380 IEM_MC_REL_JMP_S8(i8Imm);
17381 } IEM_MC_ELSE() {
17382 IEM_MC_ADVANCE_RIP();
17383 } IEM_MC_ENDIF();
17384 }
17385 else
17386 {
17387 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
17388 IEM_MC_ADVANCE_RIP();
17389 }
17390 IEM_MC_END();
17391 return VINF_SUCCESS;
17392
17393 case IEMMODE_64BIT:
17394 IEM_MC_BEGIN(0,0);
17395 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
17396 {
17397 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
17398 IEM_MC_IF_RCX_IS_NZ() {
17399 IEM_MC_REL_JMP_S8(i8Imm);
17400 } IEM_MC_ELSE() {
17401 IEM_MC_ADVANCE_RIP();
17402 } IEM_MC_ENDIF();
17403 }
17404 else
17405 {
17406 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
17407 IEM_MC_ADVANCE_RIP();
17408 }
17409 IEM_MC_END();
17410 return VINF_SUCCESS;
17411
17412 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17413 }
17414}
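
/*
 * Note on the IEM_GET_INSTR_LEN comparison above: when the displacement
 * branches back onto the LOOP instruction itself ("loop $", encoded e2 fe,
 * so i8Imm equals minus the instruction length), the loop has no side
 * effect other than counting the register down to zero.  The decoder
 * therefore stores zero into the counter and falls through instead of
 * iterating the guest instruction.
 */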
17415
17416
17417/** Opcode 0xe3. */
17418FNIEMOP_DEF(iemOp_jecxz_Jb)
17419{
17420 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
17421 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17423 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17424
17425 switch (pVCpu->iem.s.enmEffAddrMode)
17426 {
17427 case IEMMODE_16BIT:
17428 IEM_MC_BEGIN(0,0);
17429 IEM_MC_IF_CX_IS_NZ() {
17430 IEM_MC_ADVANCE_RIP();
17431 } IEM_MC_ELSE() {
17432 IEM_MC_REL_JMP_S8(i8Imm);
17433 } IEM_MC_ENDIF();
17434 IEM_MC_END();
17435 return VINF_SUCCESS;
17436
17437 case IEMMODE_32BIT:
17438 IEM_MC_BEGIN(0,0);
17439 IEM_MC_IF_ECX_IS_NZ() {
17440 IEM_MC_ADVANCE_RIP();
17441 } IEM_MC_ELSE() {
17442 IEM_MC_REL_JMP_S8(i8Imm);
17443 } IEM_MC_ENDIF();
17444 IEM_MC_END();
17445 return VINF_SUCCESS;
17446
17447 case IEMMODE_64BIT:
17448 IEM_MC_BEGIN(0,0);
17449 IEM_MC_IF_RCX_IS_NZ() {
17450 IEM_MC_ADVANCE_RIP();
17451 } IEM_MC_ELSE() {
17452 IEM_MC_REL_JMP_S8(i8Imm);
17453 } IEM_MC_ENDIF();
17454 IEM_MC_END();
17455 return VINF_SUCCESS;
17456
17457 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17458 }
17459}
17460
17461
17462/** Opcode 0xe4 */
17463FNIEMOP_DEF(iemOp_in_AL_Ib)
17464{
17465 IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
17466 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17468 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
17469}
17470
17471
17472/** Opcode 0xe5 */
17473FNIEMOP_DEF(iemOp_in_eAX_Ib)
17474{
17475 IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
17476 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17478 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17479}
17480
17481
17482/** Opcode 0xe6 */
17483FNIEMOP_DEF(iemOp_out_Ib_AL)
17484{
17485 IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
17486 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17487 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17488 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
17489}
17490
17491
17492/** Opcode 0xe7 */
17493FNIEMOP_DEF(iemOp_out_Ib_eAX)
17494{
17495 IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
17496 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17498 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17499}
17500
17501
17502/** Opcode 0xe8. */
17503FNIEMOP_DEF(iemOp_call_Jv)
17504{
17505 IEMOP_MNEMONIC(call_Jv, "call Jv");
17506 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17507 switch (pVCpu->iem.s.enmEffOpSize)
17508 {
17509 case IEMMODE_16BIT:
17510 {
17511 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
17512 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
17513 }
17514
17515 case IEMMODE_32BIT:
17516 {
17517 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
17518 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
17519 }
17520
17521 case IEMMODE_64BIT:
17522 {
17523 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
17524 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
17525 }
17526
17527 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17528 }
17529}
17530
17531
17532/** Opcode 0xe9. */
17533FNIEMOP_DEF(iemOp_jmp_Jv)
17534{
17535 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
17536 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17537 switch (pVCpu->iem.s.enmEffOpSize)
17538 {
17539 case IEMMODE_16BIT:
17540 {
17541 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
17542 IEM_MC_BEGIN(0, 0);
17543 IEM_MC_REL_JMP_S16(i16Imm);
17544 IEM_MC_END();
17545 return VINF_SUCCESS;
17546 }
17547
17548 case IEMMODE_64BIT:
17549 case IEMMODE_32BIT:
17550 {
17551 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
17552 IEM_MC_BEGIN(0, 0);
17553 IEM_MC_REL_JMP_S32(i32Imm);
17554 IEM_MC_END();
17555 return VINF_SUCCESS;
17556 }
17557
17558 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17559 }
17560}
17561
17562
17563/** Opcode 0xea. */
17564FNIEMOP_DEF(iemOp_jmp_Ap)
17565{
17566 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
17567 IEMOP_HLP_NO_64BIT();
17568
17569 /* Decode the far pointer address and pass it on to the far call C implementation. */
17570 uint32_t offSeg;
17571 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
17572 IEM_OPCODE_GET_NEXT_U32(&offSeg);
17573 else
17574 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
17575 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
17576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17577 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
17578}
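
/*
 * Illustrative encoding of the Ap operand decoded above (not part of the
 * build): the offset immediate comes first, the 16-bit selector last,
 * matching the fetch order in iemOp_jmp_Ap.
 */
#if 0 /* example only: jmp 0x1234:0x89abcdef with 32-bit operand size */
static uint8_t const g_abExampleJmpFar[] = { 0xea, 0xef, 0xcd, 0xab, 0x89, 0x34, 0x12 };
#endif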
17579
17580
17581/** Opcode 0xeb. */
17582FNIEMOP_DEF(iemOp_jmp_Jb)
17583{
17584 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
17585 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17587 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17588
17589 IEM_MC_BEGIN(0, 0);
17590 IEM_MC_REL_JMP_S8(i8Imm);
17591 IEM_MC_END();
17592 return VINF_SUCCESS;
17593}
17594
17595
17596/** Opcode 0xec */
17597FNIEMOP_DEF(iemOp_in_AL_DX)
17598{
17599 IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
17600 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17601 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
17602}
17603
17604
17605/** Opcode 0xed */
17606FNIEMOP_DEF(iemOp_eAX_DX)
17607{
17608 IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
17609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17610 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17611}
17612
17613
17614/** Opcode 0xee */
17615FNIEMOP_DEF(iemOp_out_DX_AL)
17616{
17617 IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
17618 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17619 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
17620}
17621
17622
17623/** Opcode 0xef */
17624FNIEMOP_DEF(iemOp_out_DX_eAX)
17625{
17626 IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
17627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17628 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17629}
17630
17631
17632/** Opcode 0xf0. */
17633FNIEMOP_DEF(iemOp_lock)
17634{
17635 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
17636 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
17637
17638 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17639 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17640}
17641
17642
17643/** Opcode 0xf1. */
17644FNIEMOP_DEF(iemOp_int_1)
17645{
17646 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
17647 IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
17648 /** @todo testcase! */
17649 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
17650}
17651
17652
17653/** Opcode 0xf2. */
17654FNIEMOP_DEF(iemOp_repne)
17655{
17656 /* This overrides any previous REPE prefix. */
17657 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
17658 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
17659 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
17660
17661 /* For the 4 entry opcode tables, REPNZ overrides any previous
17662 REPZ and operand size prefixes. */
17663 pVCpu->iem.s.idxPrefix = 3;
17664
17665 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17666 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17667}
17668
17669
17670/** Opcode 0xf3. */
17671FNIEMOP_DEF(iemOp_repe)
17672{
17673 /* This overrides any previous REPNE prefix. */
17674 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
17675 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
17676 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
17677
17678    /* For the 4 entry opcode tables, REPZ overrides any previous
17679 REPNZ and operand size prefixes. */
17680 pVCpu->iem.s.idxPrefix = 2;
17681
17682 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17683 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17684}
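
/*
 * Summary of the idxPrefix convention used by the two handlers above for
 * the four-entry, prefix-indexed opcode tables: index 2 selects the
 * F3/REPZ column and index 3 the F2/REPNZ column.  (Presumably index 0 is
 * the no-prefix column and index 1 the 0x66 operand-size column, set up
 * elsewhere in the decoder; the enum below is a hypothetical summary, not
 * names used by the code.)
 */
#if 0 /* example only */
enum { kIemExPrefixIdx_None = 0, kIemExPrefixIdx_66 = 1, kIemExPrefixIdx_F3 = 2, kIemExPrefixIdx_F2 = 3 };
#endif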
17685
17686
17687/** Opcode 0xf4. */
17688FNIEMOP_DEF(iemOp_hlt)
17689{
17690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17691 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
17692}
17693
17694
17695/** Opcode 0xf5. */
17696FNIEMOP_DEF(iemOp_cmc)
17697{
17698 IEMOP_MNEMONIC(cmc, "cmc");
17699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17700 IEM_MC_BEGIN(0, 0);
17701 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
17702 IEM_MC_ADVANCE_RIP();
17703 IEM_MC_END();
17704 return VINF_SUCCESS;
17705}
17706
17707
17708/**
17709 * Common implementation of 'inc/dec/not/neg Eb'.
17710 *
17711 * @param bRm The RM byte.
17712 * @param pImpl The instruction implementation.
17713 */
17714FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
17715{
17716 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17717 {
17718 /* register access */
17719 IEM_MC_BEGIN(2, 0);
17720 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17721 IEM_MC_ARG(uint32_t *, pEFlags, 1);
17722 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17723 IEM_MC_REF_EFLAGS(pEFlags);
17724 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
17725 IEM_MC_ADVANCE_RIP();
17726 IEM_MC_END();
17727 }
17728 else
17729 {
17730 /* memory access. */
17731 IEM_MC_BEGIN(2, 2);
17732 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17733 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17734 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17735
17736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17737 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17738 IEM_MC_FETCH_EFLAGS(EFlags);
17739 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17740 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
17741 else
17742 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
17743
17744 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
17745 IEM_MC_COMMIT_EFLAGS(EFlags);
17746 IEM_MC_ADVANCE_RIP();
17747 IEM_MC_END();
17748 }
17749 return VINF_SUCCESS;
17750}
17751
17752
17753/**
17754 * Common implementation of 'inc/dec/not/neg Ev'.
17755 *
17756 * @param bRm The RM byte.
17757 * @param pImpl The instruction implementation.
17758 */
17759FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
17760{
17761 /* Registers are handled by a common worker. */
17762 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17763 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17764
17765 /* Memory we do here. */
17766 switch (pVCpu->iem.s.enmEffOpSize)
17767 {
17768 case IEMMODE_16BIT:
17769 IEM_MC_BEGIN(2, 2);
17770 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
17771 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17772 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17773
17774 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17775 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17776 IEM_MC_FETCH_EFLAGS(EFlags);
17777 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17778 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
17779 else
17780 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
17781
17782 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
17783 IEM_MC_COMMIT_EFLAGS(EFlags);
17784 IEM_MC_ADVANCE_RIP();
17785 IEM_MC_END();
17786 return VINF_SUCCESS;
17787
17788 case IEMMODE_32BIT:
17789 IEM_MC_BEGIN(2, 2);
17790 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
17791 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17792 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17793
17794 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17795 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17796 IEM_MC_FETCH_EFLAGS(EFlags);
17797 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17798 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
17799 else
17800 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
17801
17802 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
17803 IEM_MC_COMMIT_EFLAGS(EFlags);
17804 IEM_MC_ADVANCE_RIP();
17805 IEM_MC_END();
17806 return VINF_SUCCESS;
17807
17808 case IEMMODE_64BIT:
17809 IEM_MC_BEGIN(2, 2);
17810 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
17811 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17812 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17813
17814 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17815 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17816 IEM_MC_FETCH_EFLAGS(EFlags);
17817 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17818 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
17819 else
17820 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
17821
17822 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
17823 IEM_MC_COMMIT_EFLAGS(EFlags);
17824 IEM_MC_ADVANCE_RIP();
17825 IEM_MC_END();
17826 return VINF_SUCCESS;
17827
17828 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17829 }
17830}
17831
17832
17833/** Opcode 0xf6 /0. */
17834FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
17835{
17836 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
17837 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
17838
17839 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17840 {
17841 /* register access */
17842 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17844
17845 IEM_MC_BEGIN(3, 0);
17846 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17847 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
17848 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17849 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17850 IEM_MC_REF_EFLAGS(pEFlags);
17851 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
17852 IEM_MC_ADVANCE_RIP();
17853 IEM_MC_END();
17854 }
17855 else
17856 {
17857 /* memory access. */
17858 IEM_MC_BEGIN(3, 2);
17859 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17860 IEM_MC_ARG(uint8_t, u8Src, 1);
17861 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
17862 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17863
17864 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
17865 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17866 IEM_MC_ASSIGN(u8Src, u8Imm);
17867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17868 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17869 IEM_MC_FETCH_EFLAGS(EFlags);
17870 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
17871
17872 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
17873 IEM_MC_COMMIT_EFLAGS(EFlags);
17874 IEM_MC_ADVANCE_RIP();
17875 IEM_MC_END();
17876 }
17877 return VINF_SUCCESS;
17878}
17879
17880
17881/** Opcode 0xf7 /0. */
17882FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
17883{
17884 IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
17885 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
17886
17887 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17888 {
17889 /* register access */
17890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17891 switch (pVCpu->iem.s.enmEffOpSize)
17892 {
17893 case IEMMODE_16BIT:
17894 {
17895 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
17896 IEM_MC_BEGIN(3, 0);
17897 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
17898 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
17899 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17900 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17901 IEM_MC_REF_EFLAGS(pEFlags);
17902 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
17903 IEM_MC_ADVANCE_RIP();
17904 IEM_MC_END();
17905 return VINF_SUCCESS;
17906 }
17907
17908 case IEMMODE_32BIT:
17909 {
17910 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
17911 IEM_MC_BEGIN(3, 0);
17912 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
17913 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
17914 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17915 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17916 IEM_MC_REF_EFLAGS(pEFlags);
17917 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
17918 /* No clearing the high dword here - test doesn't write back the result. */
17919 IEM_MC_ADVANCE_RIP();
17920 IEM_MC_END();
17921 return VINF_SUCCESS;
17922 }
17923
17924 case IEMMODE_64BIT:
17925 {
17926 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
17927 IEM_MC_BEGIN(3, 0);
17928 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
17929 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
17930 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17931 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17932 IEM_MC_REF_EFLAGS(pEFlags);
17933 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
17934 IEM_MC_ADVANCE_RIP();
17935 IEM_MC_END();
17936 return VINF_SUCCESS;
17937 }
17938
17939 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17940 }
17941 }
17942 else
17943 {
17944 /* memory access. */
17945 switch (pVCpu->iem.s.enmEffOpSize)
17946 {
17947 case IEMMODE_16BIT:
17948 {
17949 IEM_MC_BEGIN(3, 2);
17950 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
17951 IEM_MC_ARG(uint16_t, u16Src, 1);
17952 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
17953 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17954
17955 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
17956 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
17957 IEM_MC_ASSIGN(u16Src, u16Imm);
17958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17959 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17960 IEM_MC_FETCH_EFLAGS(EFlags);
17961 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
17962
17963 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
17964 IEM_MC_COMMIT_EFLAGS(EFlags);
17965 IEM_MC_ADVANCE_RIP();
17966 IEM_MC_END();
17967 return VINF_SUCCESS;
17968 }
17969
17970 case IEMMODE_32BIT:
17971 {
17972 IEM_MC_BEGIN(3, 2);
17973 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
17974 IEM_MC_ARG(uint32_t, u32Src, 1);
17975 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
17976 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17977
17978 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
17979 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
17980 IEM_MC_ASSIGN(u32Src, u32Imm);
17981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17982 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17983 IEM_MC_FETCH_EFLAGS(EFlags);
17984 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
17985
17986 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
17987 IEM_MC_COMMIT_EFLAGS(EFlags);
17988 IEM_MC_ADVANCE_RIP();
17989 IEM_MC_END();
17990 return VINF_SUCCESS;
17991 }
17992
17993 case IEMMODE_64BIT:
17994 {
17995 IEM_MC_BEGIN(3, 2);
17996 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
17997 IEM_MC_ARG(uint64_t, u64Src, 1);
17998 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
17999 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18000
18001 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
18002 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
18003 IEM_MC_ASSIGN(u64Src, u64Imm);
18004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18005 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18006 IEM_MC_FETCH_EFLAGS(EFlags);
18007 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
18008
18009 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
18010 IEM_MC_COMMIT_EFLAGS(EFlags);
18011 IEM_MC_ADVANCE_RIP();
18012 IEM_MC_END();
18013 return VINF_SUCCESS;
18014 }
18015
18016 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18017 }
18018 }
18019}
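
/*
 * Note on the third IEM_MC_CALC_RM_EFF_ADDR argument in the two TEST
 * workers above (1, 2 or 4 instead of the usual 0): it is the number of
 * immediate bytes still to be fetched after the ModRM encoding.  In 64-bit
 * mode a RIP-relative disp32 is relative to the end of the whole
 * instruction, so the calculation has to account for the trailing
 * immediate.  A minimal sketch (hypothetical helper):
 */
#if 0 /* example only */
static uint64_t iemExampleRipRelEa(uint64_t uRipAtDisp32, int32_t i32Disp, uint8_t cbImm)
{
    /* disp32 is measured from the byte following the displacement plus
       any immediate operand that comes after it. */
    return uRipAtDisp32 + 4 + cbImm + (int64_t)i32Disp;
}
#endif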
18020
18021
18022/** Opcode 0xf6 /4, /5, /6 and /7. */
18023FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
18024{
18025 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18026 {
18027 /* register access */
18028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18029 IEM_MC_BEGIN(3, 1);
18030 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18031 IEM_MC_ARG(uint8_t, u8Value, 1);
18032 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18033 IEM_MC_LOCAL(int32_t, rc);
18034
18035 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18036 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18037 IEM_MC_REF_EFLAGS(pEFlags);
18038 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
18039 IEM_MC_IF_LOCAL_IS_Z(rc) {
18040 IEM_MC_ADVANCE_RIP();
18041 } IEM_MC_ELSE() {
18042 IEM_MC_RAISE_DIVIDE_ERROR();
18043 } IEM_MC_ENDIF();
18044
18045 IEM_MC_END();
18046 }
18047 else
18048 {
18049 /* memory access. */
18050 IEM_MC_BEGIN(3, 2);
18051 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18052 IEM_MC_ARG(uint8_t, u8Value, 1);
18053 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18054 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18055 IEM_MC_LOCAL(int32_t, rc);
18056
18057 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18059 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18060 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18061 IEM_MC_REF_EFLAGS(pEFlags);
18062 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
18063 IEM_MC_IF_LOCAL_IS_Z(rc) {
18064 IEM_MC_ADVANCE_RIP();
18065 } IEM_MC_ELSE() {
18066 IEM_MC_RAISE_DIVIDE_ERROR();
18067 } IEM_MC_ENDIF();
18068
18069 IEM_MC_END();
18070 }
18071 return VINF_SUCCESS;
18072}
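
/*
 * A minimal model of the 8-bit widening multiply dispatched above
 * (hypothetical helper, flags omitted): the byte operand is combined with
 * AL and the 16-bit product lands in AX, which is why the worker only
 * references AX and never DX.
 */
#if 0 /* example only */
static void iemExampleMulU8(uint16_t *pu16AX, uint8_t u8Value)
{
    *pu16AX = (uint16_t)((uint8_t)*pu16AX * u8Value); /* AX = AL * r/m8 */
}
#endif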
18073
18074
18075/** Opcode 0xf7 /4, /5, /6 and /7. */
18076FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
18077{
18078 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18079
18080 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18081 {
18082 /* register access */
18084 switch (pVCpu->iem.s.enmEffOpSize)
18085 {
18086 case IEMMODE_16BIT:
18087 {
18088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18089 IEM_MC_BEGIN(4, 1);
18090 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18091 IEM_MC_ARG(uint16_t *, pu16DX, 1);
18092 IEM_MC_ARG(uint16_t, u16Value, 2);
18093 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18094 IEM_MC_LOCAL(int32_t, rc);
18095
18096 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18097 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18098 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
18099 IEM_MC_REF_EFLAGS(pEFlags);
18100 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
18101 IEM_MC_IF_LOCAL_IS_Z(rc) {
18102 IEM_MC_ADVANCE_RIP();
18103 } IEM_MC_ELSE() {
18104 IEM_MC_RAISE_DIVIDE_ERROR();
18105 } IEM_MC_ENDIF();
18106
18107 IEM_MC_END();
18108 return VINF_SUCCESS;
18109 }
18110
18111 case IEMMODE_32BIT:
18112 {
18113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18114 IEM_MC_BEGIN(4, 1);
18115 IEM_MC_ARG(uint32_t *, pu32AX, 0);
18116 IEM_MC_ARG(uint32_t *, pu32DX, 1);
18117 IEM_MC_ARG(uint32_t, u32Value, 2);
18118 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18119 IEM_MC_LOCAL(int32_t, rc);
18120
18121 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18122 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
18123 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
18124 IEM_MC_REF_EFLAGS(pEFlags);
18125 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
18126 IEM_MC_IF_LOCAL_IS_Z(rc) {
18127 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
18128 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
18129 IEM_MC_ADVANCE_RIP();
18130 } IEM_MC_ELSE() {
18131 IEM_MC_RAISE_DIVIDE_ERROR();
18132 } IEM_MC_ENDIF();
18133
18134 IEM_MC_END();
18135 return VINF_SUCCESS;
18136 }
18137
18138 case IEMMODE_64BIT:
18139 {
18140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18141 IEM_MC_BEGIN(4, 1);
18142 IEM_MC_ARG(uint64_t *, pu64AX, 0);
18143 IEM_MC_ARG(uint64_t *, pu64DX, 1);
18144 IEM_MC_ARG(uint64_t, u64Value, 2);
18145 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18146 IEM_MC_LOCAL(int32_t, rc);
18147
18148 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18149 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
18150 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
18151 IEM_MC_REF_EFLAGS(pEFlags);
18152 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
18153 IEM_MC_IF_LOCAL_IS_Z(rc) {
18154 IEM_MC_ADVANCE_RIP();
18155 } IEM_MC_ELSE() {
18156 IEM_MC_RAISE_DIVIDE_ERROR();
18157 } IEM_MC_ENDIF();
18158
18159 IEM_MC_END();
18160 return VINF_SUCCESS;
18161 }
18162
18163 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18164 }
18165 }
18166 else
18167 {
18168 /* memory access. */
18169 switch (pVCpu->iem.s.enmEffOpSize)
18170 {
18171 case IEMMODE_16BIT:
18172 {
18173 IEM_MC_BEGIN(4, 2);
18174 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18175 IEM_MC_ARG(uint16_t *, pu16DX, 1);
18176 IEM_MC_ARG(uint16_t, u16Value, 2);
18177 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18178 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18179 IEM_MC_LOCAL(int32_t, rc);
18180
18181 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18183 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18184 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18185 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
18186 IEM_MC_REF_EFLAGS(pEFlags);
18187 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
18188 IEM_MC_IF_LOCAL_IS_Z(rc) {
18189 IEM_MC_ADVANCE_RIP();
18190 } IEM_MC_ELSE() {
18191 IEM_MC_RAISE_DIVIDE_ERROR();
18192 } IEM_MC_ENDIF();
18193
18194 IEM_MC_END();
18195 return VINF_SUCCESS;
18196 }
18197
18198 case IEMMODE_32BIT:
18199 {
18200 IEM_MC_BEGIN(4, 2);
18201 IEM_MC_ARG(uint32_t *, pu32AX, 0);
18202 IEM_MC_ARG(uint32_t *, pu32DX, 1);
18203 IEM_MC_ARG(uint32_t, u32Value, 2);
18204 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18205 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18206 IEM_MC_LOCAL(int32_t, rc);
18207
18208 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18210 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18211 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
18212 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
18213 IEM_MC_REF_EFLAGS(pEFlags);
18214 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
18215 IEM_MC_IF_LOCAL_IS_Z(rc) {
18216 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
18217 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
18218 IEM_MC_ADVANCE_RIP();
18219 } IEM_MC_ELSE() {
18220 IEM_MC_RAISE_DIVIDE_ERROR();
18221 } IEM_MC_ENDIF();
18222
18223 IEM_MC_END();
18224 return VINF_SUCCESS;
18225 }
18226
18227 case IEMMODE_64BIT:
18228 {
18229 IEM_MC_BEGIN(4, 2);
18230 IEM_MC_ARG(uint64_t *, pu64AX, 0);
18231 IEM_MC_ARG(uint64_t *, pu64DX, 1);
18232 IEM_MC_ARG(uint64_t, u64Value, 2);
18233 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18234 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18235 IEM_MC_LOCAL(int32_t, rc);
18236
18237 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18239 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18240 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
18241 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
18242 IEM_MC_REF_EFLAGS(pEFlags);
18243 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
18244 IEM_MC_IF_LOCAL_IS_Z(rc) {
18245 IEM_MC_ADVANCE_RIP();
18246 } IEM_MC_ELSE() {
18247 IEM_MC_RAISE_DIVIDE_ERROR();
18248 } IEM_MC_ENDIF();
18249
18250 IEM_MC_END();
18251 return VINF_SUCCESS;
18252 }
18253
18254 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18255 }
18256 }
18257}
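
/*
 * A minimal model of the 16-bit DIV case dispatched above (hypothetical
 * helper, flags omitted): the dividend is DX:AX, the quotient must fit in
 * 16 bits, and a zero divisor or an overflowing quotient takes the
 * IEM_MC_RAISE_DIVIDE_ERROR() path instead of writing the registers.
 */
#if 0 /* example only */
static int iemExampleDivU16(uint16_t *pu16AX, uint16_t *pu16DX, uint16_t u16Divisor)
{
    uint32_t const uDividend = ((uint32_t)*pu16DX << 16) | *pu16AX;
    if (u16Divisor == 0 || uDividend / u16Divisor > UINT16_MAX)
        return -1; /* -> #DE */
    *pu16AX = (uint16_t)(uDividend / u16Divisor);
    *pu16DX = (uint16_t)(uDividend % u16Divisor);
    return 0;
}
#endif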
18258
18259/** Opcode 0xf6. */
18260FNIEMOP_DEF(iemOp_Grp3_Eb)
18261{
18262 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
18263 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
18264 {
18265 case 0:
18266 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
18267 case 1:
18268/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
18269 return IEMOP_RAISE_INVALID_OPCODE();
18270 case 2:
18271 IEMOP_MNEMONIC(not_Eb, "not Eb");
18272 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
18273 case 3:
18274 IEMOP_MNEMONIC(neg_Eb, "neg Eb");
18275 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
18276 case 4:
18277 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
18278 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18279 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
18280 case 5:
18281 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
18282 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18283 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
18284 case 6:
18285 IEMOP_MNEMONIC(div_Eb, "div Eb");
18286 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
18287 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
18288 case 7:
18289 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
18290 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
18291 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
18292 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18293 }
18294}
18295
18296
18297/** Opcode 0xf7. */
18298FNIEMOP_DEF(iemOp_Grp3_Ev)
18299{
18300 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
18301 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
18302 {
18303 case 0:
18304 return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
18305 case 1:
18306/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
18307 return IEMOP_RAISE_INVALID_OPCODE();
18308 case 2:
18309 IEMOP_MNEMONIC(not_Ev, "not Ev");
18310 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
18311 case 3:
18312 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
18313 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
18314 case 4:
18315 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
18316 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18317 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
18318 case 5:
18319 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
18320 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18321 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
18322 case 6:
18323 IEMOP_MNEMONIC(div_Ev, "div Ev");
18324 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
18325 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
18326 case 7:
18327 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
18328 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
18329 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
18330 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18331 }
18332}
18333
18334
18335/** Opcode 0xf8. */
18336FNIEMOP_DEF(iemOp_clc)
18337{
18338 IEMOP_MNEMONIC(clc, "clc");
18339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18340 IEM_MC_BEGIN(0, 0);
18341 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
18342 IEM_MC_ADVANCE_RIP();
18343 IEM_MC_END();
18344 return VINF_SUCCESS;
18345}
18346
18347
18348/** Opcode 0xf9. */
18349FNIEMOP_DEF(iemOp_stc)
18350{
18351 IEMOP_MNEMONIC(stc, "stc");
18352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18353 IEM_MC_BEGIN(0, 0);
18354 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
18355 IEM_MC_ADVANCE_RIP();
18356 IEM_MC_END();
18357 return VINF_SUCCESS;
18358}
18359
18360
18361/** Opcode 0xfa. */
18362FNIEMOP_DEF(iemOp_cli)
18363{
18364 IEMOP_MNEMONIC(cli, "cli");
18365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18366 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
18367}
18368
18369
18370/** Opcode 0xfb. */
FNIEMOP_DEF(iemOp_sti)
18371{
18372 IEMOP_MNEMONIC(sti, "sti");
18373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18374 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
18375}
18376
18377
18378/** Opcode 0xfc. */
18379FNIEMOP_DEF(iemOp_cld)
18380{
18381 IEMOP_MNEMONIC(cld, "cld");
18382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18383 IEM_MC_BEGIN(0, 0);
18384 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
18385 IEM_MC_ADVANCE_RIP();
18386 IEM_MC_END();
18387 return VINF_SUCCESS;
18388}
18389
18390
18391/** Opcode 0xfd. */
18392FNIEMOP_DEF(iemOp_std)
18393{
18394 IEMOP_MNEMONIC(std, "std");
18395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18396 IEM_MC_BEGIN(0, 0);
18397 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
18398 IEM_MC_ADVANCE_RIP();
18399 IEM_MC_END();
18400 return VINF_SUCCESS;
18401}
18402
18403
18404/** Opcode 0xfe. */
18405FNIEMOP_DEF(iemOp_Grp4)
18406{
18407 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
18408 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
18409 {
18410 case 0:
18411 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
18412 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
18413 case 1:
18414 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
18415 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
18416 default:
18417 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
18418 return IEMOP_RAISE_INVALID_OPCODE();
18419 }
18420}
18421
18422
18423/**
18424 * Opcode 0xff /2.
18425 * @param bRm The RM byte.
18426 */
18427FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
18428{
18429 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
18430 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
18431
18432 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18433 {
18434 /* The new RIP is taken from a register. */
18435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18436 switch (pVCpu->iem.s.enmEffOpSize)
18437 {
18438 case IEMMODE_16BIT:
18439 IEM_MC_BEGIN(1, 0);
18440 IEM_MC_ARG(uint16_t, u16Target, 0);
18441 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18442 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
18443 IEM_MC_END()
18444 return VINF_SUCCESS;
18445
18446 case IEMMODE_32BIT:
18447 IEM_MC_BEGIN(1, 0);
18448 IEM_MC_ARG(uint32_t, u32Target, 0);
18449 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18450 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
18451 IEM_MC_END()
18452 return VINF_SUCCESS;
18453
18454 case IEMMODE_64BIT:
18455 IEM_MC_BEGIN(1, 0);
18456 IEM_MC_ARG(uint64_t, u64Target, 0);
18457 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18458 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
18459 IEM_MC_END()
18460 return VINF_SUCCESS;
18461
18462 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18463 }
18464 }
18465 else
18466 {
18467        /* The new RIP is taken from a memory location. */
18468 switch (pVCpu->iem.s.enmEffOpSize)
18469 {
18470 case IEMMODE_16BIT:
18471 IEM_MC_BEGIN(1, 1);
18472 IEM_MC_ARG(uint16_t, u16Target, 0);
18473 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18474 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18476 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18477 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
18478 IEM_MC_END()
18479 return VINF_SUCCESS;
18480
18481 case IEMMODE_32BIT:
18482 IEM_MC_BEGIN(1, 1);
18483 IEM_MC_ARG(uint32_t, u32Target, 0);
18484 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18485 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18487 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18488 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
18489 IEM_MC_END()
18490 return VINF_SUCCESS;
18491
18492 case IEMMODE_64BIT:
18493 IEM_MC_BEGIN(1, 1);
18494 IEM_MC_ARG(uint64_t, u64Target, 0);
18495 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18496 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18498 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18499 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
18500 IEM_MC_END()
18501 return VINF_SUCCESS;
18502
18503 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18504 }
18505 }
18506}
18507
18508typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
18509
18510FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
18511{
18512 /* Registers? How?? */
18513 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
18514 { /* likely */ }
18515 else
18516 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
18517
18518 /* Far pointer loaded from memory. */
18519 switch (pVCpu->iem.s.enmEffOpSize)
18520 {
18521 case IEMMODE_16BIT:
18522 IEM_MC_BEGIN(3, 1);
18523 IEM_MC_ARG(uint16_t, u16Sel, 0);
18524 IEM_MC_ARG(uint16_t, offSeg, 1);
18525 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
18526 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18527 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18529 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18530 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
18531 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18532 IEM_MC_END();
18533 return VINF_SUCCESS;
18534
18535 case IEMMODE_64BIT:
18536 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
18537 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
18538 * and call far qword [rsp] encodings. */
18539 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
18540 {
18541 IEM_MC_BEGIN(3, 1);
18542 IEM_MC_ARG(uint16_t, u16Sel, 0);
18543 IEM_MC_ARG(uint64_t, offSeg, 1);
18544                IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2);
18545 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18546 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18548 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18549 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
18550 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18551 IEM_MC_END();
18552 return VINF_SUCCESS;
18553 }
18554 /* AMD falls thru. */
18555
18556 case IEMMODE_32BIT:
18557 IEM_MC_BEGIN(3, 1);
18558 IEM_MC_ARG(uint16_t, u16Sel, 0);
18559 IEM_MC_ARG(uint32_t, offSeg, 1);
18560 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
18561 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18562 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18564 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18565 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
18566 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18567 IEM_MC_END();
18568 return VINF_SUCCESS;
18569
18570 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18571 }
18572}
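
/*
 * Illustrative layout of the Ep memory operand decoded above (not part of
 * the build): the offset sits at the lowest address and the 16-bit
 * selector follows it, matching the IEM_MC_FETCH_MEM_U16_DISP offsets of
 * 2, 4 and 8 for the three operand sizes.  The type below is hypothetical.
 */
#if 0 /* example only: the m16:32 form */
#pragma pack(1)
typedef struct IEMEXAMPLEFARPTR32
{
    uint32_t off; /* +0: offset */
    uint16_t sel; /* +4: selector */
} IEMEXAMPLEFARPTR32;
#pragma pack()
#endif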
18573
18574
18575/**
18576 * Opcode 0xff /3.
18577 * @param bRm The RM byte.
18578 */
18579FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
18580{
18581 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
18582 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
18583}
18584
18585
18586/**
18587 * Opcode 0xff /4.
18588 * @param bRm The RM byte.
18589 */
18590FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
18591{
18592 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
18593 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
18594
18595 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18596 {
18597 /* The new RIP is taken from a register. */
18598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18599 switch (pVCpu->iem.s.enmEffOpSize)
18600 {
18601 case IEMMODE_16BIT:
18602 IEM_MC_BEGIN(0, 1);
18603 IEM_MC_LOCAL(uint16_t, u16Target);
18604 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18605 IEM_MC_SET_RIP_U16(u16Target);
18606 IEM_MC_END();
18607 return VINF_SUCCESS;
18608
18609 case IEMMODE_32BIT:
18610 IEM_MC_BEGIN(0, 1);
18611 IEM_MC_LOCAL(uint32_t, u32Target);
18612 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18613 IEM_MC_SET_RIP_U32(u32Target);
18614 IEM_MC_END();
18615 return VINF_SUCCESS;
18616
18617 case IEMMODE_64BIT:
18618 IEM_MC_BEGIN(0, 1);
18619 IEM_MC_LOCAL(uint64_t, u64Target);
18620 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18621 IEM_MC_SET_RIP_U64(u64Target);
18622 IEM_MC_END();
18623 return VINF_SUCCESS;
18624
18625 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18626 }
18627 }
18628 else
18629 {
18630 /* The new RIP is taken from a memory location. */
18631 switch (pVCpu->iem.s.enmEffOpSize)
18632 {
18633 case IEMMODE_16BIT:
18634 IEM_MC_BEGIN(0, 2);
18635 IEM_MC_LOCAL(uint16_t, u16Target);
18636 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18637 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18639 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18640 IEM_MC_SET_RIP_U16(u16Target);
18641 IEM_MC_END();
18642 return VINF_SUCCESS;
18643
18644 case IEMMODE_32BIT:
18645 IEM_MC_BEGIN(0, 2);
18646 IEM_MC_LOCAL(uint32_t, u32Target);
18647 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18648 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18650 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18651 IEM_MC_SET_RIP_U32(u32Target);
18652 IEM_MC_END();
18653 return VINF_SUCCESS;
18654
18655 case IEMMODE_64BIT:
18656 IEM_MC_BEGIN(0, 2);
18657 IEM_MC_LOCAL(uint64_t, u64Target);
18658 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18659 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18661 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18662 IEM_MC_SET_RIP_U64(u64Target);
18663 IEM_MC_END();
18664 return VINF_SUCCESS;
18665
18666 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18667 }
18668 }
18669}
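/*
 * Example encodings for the decoder above (a sketch, 64-bit mode assumed):
 *      FF E0               jmp rax             ; mod=3 register form
 *      FF 25 disp32        jmp [rip+disp32]    ; memory form
 * The default 64-bit operand size means the IEMMODE_64BIT path is taken
 * unless a 0x66 prefix forces 16-bit.
 */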
18670
18671
18672/**
18673 * Opcode 0xff /5.
18674 * @param bRm The RM byte.
18675 */
18676FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
18677{
18678 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
18679 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
18680}
18681
18682
18683/**
18684 * Opcode 0xff /6.
18685 * @param bRm The RM byte.
18686 */
18687FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
18688{
18689 IEMOP_MNEMONIC(push_Ev, "push Ev");
18690
18691 /* Registers are handled by a common worker. */
18692 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18693 return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18694
18695 /* The memory operand is handled here. */
18696 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
18697 switch (pVCpu->iem.s.enmEffOpSize)
18698 {
18699 case IEMMODE_16BIT:
18700 IEM_MC_BEGIN(0, 2);
18701 IEM_MC_LOCAL(uint16_t, u16Src);
18702 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18703 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18705 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18706 IEM_MC_PUSH_U16(u16Src);
18707 IEM_MC_ADVANCE_RIP();
18708 IEM_MC_END();
18709 return VINF_SUCCESS;
18710
18711 case IEMMODE_32BIT:
18712 IEM_MC_BEGIN(0, 2);
18713 IEM_MC_LOCAL(uint32_t, u32Src);
18714 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18715 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18717 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18718 IEM_MC_PUSH_U32(u32Src);
18719 IEM_MC_ADVANCE_RIP();
18720 IEM_MC_END();
18721 return VINF_SUCCESS;
18722
18723 case IEMMODE_64BIT:
18724 IEM_MC_BEGIN(0, 2);
18725 IEM_MC_LOCAL(uint64_t, u64Src);
18726 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18727 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18729 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18730 IEM_MC_PUSH_U64(u64Src);
18731 IEM_MC_ADVANCE_RIP();
18732 IEM_MC_END();
18733 return VINF_SUCCESS;
18734
18735 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18736 }
18737}
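/*
 * Example: in 64-bit mode "FF 34 24" is push qword [rsp]; with the default
 * 64-bit operand size set above this lands in the IEMMODE_64BIT case and
 * IEM_MC_PUSH_U64.
 */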
18738
18739
18740/** Opcode 0xff. */
18741FNIEMOP_DEF(iemOp_Grp5)
18742{
18743 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
18744 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
18745 {
18746 case 0:
18747 IEMOP_MNEMONIC(inc_Ev, "inc Ev");
18748 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
18749 case 1:
18750 IEMOP_MNEMONIC(dec_Ev, "dec Ev");
18751 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
18752 case 2:
18753 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
18754 case 3:
18755 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
18756 case 4:
18757 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
18758 case 5:
18759 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
18760 case 6:
18761 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
18762 case 7:
18763 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
18764 return IEMOP_RAISE_INVALID_OPCODE();
18765 }
18766 AssertFailedReturn(VERR_IEM_IPE_3);
18767}
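/*
 * Dispatch sketch: the reg field of the ModR/M byte picks the Grp5
 * sub-instruction. For instance "FF C0" has reg=0 and decodes as inc eax,
 * while "FF D0" has reg=2 and decodes as call rax in 64-bit mode.
 */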
18768
18769
18770
18771const PFNIEMOP g_apfnOneByteMap[256] =
18772{
18773 /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
18774 /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
18775 /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
18776 /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
18777 /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
18778 /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
18779 /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
18780 /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
18781 /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
18782 /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
18783 /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
18784 /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
18785 /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
18786 /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
18787 /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
18788 /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
18789 /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
18790 /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
18791 /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
18792 /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
18793 /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
18794 /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
18795 /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
18796 /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
18797 /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
18798 /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
18799 /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
18800 /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
18801 /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
18802 /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
18803 /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
18804 /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
18805 /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
18806 /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
18807 /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
18808 /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
18809 /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
18810 /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
18811 /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
18812 /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
18813 /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
18814 /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
18815 /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
18816 /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
18817 /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
18818 /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
18819 /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
18820 /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
18821 /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
18822 /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
18823 /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
18824 /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
18825 /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
18826 /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
18827 /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
18828 /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
18829 /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
18830 /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
18831 /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
18832 /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
18833 /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
18834 /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
18835 /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
18836 /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
18837};
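/*
 * A minimal sketch of how this map is consumed; the real dispatch loop
 * lives in the decoder outside this file:
 *
 *      uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
 *      return FNIEMOP_CALL(g_apfnOneByteMap[b]);
 */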
18838
18839
18840/** @} */
18841
18842#ifdef _MSC_VER
18843# pragma warning(pop)
18844#endif