VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 65607

Last change on this file since 65607 was 65607, checked in by vboxsync, 8 years ago

IEM: 0x0f 0x11 split up.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 662.8 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 65607 2017-02-03 20:00:37Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24#ifdef _MSC_VER
25# pragma warning(push)
26# pragma warning(disable: 4702) /* Unreachable code like return in iemOp_Grp6_lldt. */
27#endif
28
29
/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination.
 *
 * Decodes the ModR/M byte itself.  For the memory form the destination is
 * mapped read-write (read-only for CMP/TEST, which write no destination) and
 * the result is committed together with EFLAGS after the operation.
 *
 * @param pImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: LOCK prefix is never valid here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* A NULL pfnLockedU8 identifies the non-writing instructions (CMP, TEST):
           map the destination read-only for those, read-write otherwise. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        if (!pImpl->pfnLockedU8) /* no locked variant => reject a LOCK prefix now */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
92
93
94/**
95 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
96 * memory/register as the destination.
97 *
98 * @param pImpl Pointer to the instruction implementation (assembly).
99 */
100FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
101{
102 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
103
104 /*
105 * If rm is denoting a register, no more instruction bytes.
106 */
107 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
108 {
109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
110
111 switch (pVCpu->iem.s.enmEffOpSize)
112 {
113 case IEMMODE_16BIT:
114 IEM_MC_BEGIN(3, 0);
115 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
116 IEM_MC_ARG(uint16_t, u16Src, 1);
117 IEM_MC_ARG(uint32_t *, pEFlags, 2);
118
119 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
120 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
121 IEM_MC_REF_EFLAGS(pEFlags);
122 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
123
124 IEM_MC_ADVANCE_RIP();
125 IEM_MC_END();
126 break;
127
128 case IEMMODE_32BIT:
129 IEM_MC_BEGIN(3, 0);
130 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
131 IEM_MC_ARG(uint32_t, u32Src, 1);
132 IEM_MC_ARG(uint32_t *, pEFlags, 2);
133
134 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
135 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
136 IEM_MC_REF_EFLAGS(pEFlags);
137 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
138
139 if (pImpl != &g_iemAImpl_test)
140 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
141 IEM_MC_ADVANCE_RIP();
142 IEM_MC_END();
143 break;
144
145 case IEMMODE_64BIT:
146 IEM_MC_BEGIN(3, 0);
147 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
148 IEM_MC_ARG(uint64_t, u64Src, 1);
149 IEM_MC_ARG(uint32_t *, pEFlags, 2);
150
151 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
152 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
153 IEM_MC_REF_EFLAGS(pEFlags);
154 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
155
156 IEM_MC_ADVANCE_RIP();
157 IEM_MC_END();
158 break;
159 }
160 }
161 else
162 {
163 /*
164 * We're accessing memory.
165 * Note! We're putting the eflags on the stack here so we can commit them
166 * after the memory.
167 */
168 uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
169 switch (pVCpu->iem.s.enmEffOpSize)
170 {
171 case IEMMODE_16BIT:
172 IEM_MC_BEGIN(3, 2);
173 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
174 IEM_MC_ARG(uint16_t, u16Src, 1);
175 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
176 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
177
178 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
179 if (!pImpl->pfnLockedU16)
180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
181 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
182 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
183 IEM_MC_FETCH_EFLAGS(EFlags);
184 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
185 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
186 else
187 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
188
189 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
190 IEM_MC_COMMIT_EFLAGS(EFlags);
191 IEM_MC_ADVANCE_RIP();
192 IEM_MC_END();
193 break;
194
195 case IEMMODE_32BIT:
196 IEM_MC_BEGIN(3, 2);
197 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
198 IEM_MC_ARG(uint32_t, u32Src, 1);
199 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
200 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
201
202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
203 if (!pImpl->pfnLockedU32)
204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
205 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
206 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
207 IEM_MC_FETCH_EFLAGS(EFlags);
208 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
209 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
210 else
211 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
212
213 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
214 IEM_MC_COMMIT_EFLAGS(EFlags);
215 IEM_MC_ADVANCE_RIP();
216 IEM_MC_END();
217 break;
218
219 case IEMMODE_64BIT:
220 IEM_MC_BEGIN(3, 2);
221 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
222 IEM_MC_ARG(uint64_t, u64Src, 1);
223 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
225
226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
227 if (!pImpl->pfnLockedU64)
228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
229 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
230 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
231 IEM_MC_FETCH_EFLAGS(EFlags);
232 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
233 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
234 else
235 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
236
237 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
238 IEM_MC_COMMIT_EFLAGS(EFlags);
239 IEM_MC_ADVANCE_RIP();
240 IEM_MC_END();
241 break;
242 }
243 }
244 return VINF_SUCCESS;
245}
246
247
/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination.
 *
 * Since the destination is always a register, only a simple read of the r/m
 * operand is needed; there is no memory mapping/commit and no LOCK variant.
 *
 * @param pImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
300
301
302/**
303 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
304 * register as the destination.
305 *
306 * @param pImpl Pointer to the instruction implementation (assembly).
307 */
308FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
309{
310 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
311
312 /*
313 * If rm is denoting a register, no more instruction bytes.
314 */
315 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
316 {
317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
318 switch (pVCpu->iem.s.enmEffOpSize)
319 {
320 case IEMMODE_16BIT:
321 IEM_MC_BEGIN(3, 0);
322 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
323 IEM_MC_ARG(uint16_t, u16Src, 1);
324 IEM_MC_ARG(uint32_t *, pEFlags, 2);
325
326 IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
327 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
328 IEM_MC_REF_EFLAGS(pEFlags);
329 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
330
331 IEM_MC_ADVANCE_RIP();
332 IEM_MC_END();
333 break;
334
335 case IEMMODE_32BIT:
336 IEM_MC_BEGIN(3, 0);
337 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
338 IEM_MC_ARG(uint32_t, u32Src, 1);
339 IEM_MC_ARG(uint32_t *, pEFlags, 2);
340
341 IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
342 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
343 IEM_MC_REF_EFLAGS(pEFlags);
344 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
345
346 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
347 IEM_MC_ADVANCE_RIP();
348 IEM_MC_END();
349 break;
350
351 case IEMMODE_64BIT:
352 IEM_MC_BEGIN(3, 0);
353 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
354 IEM_MC_ARG(uint64_t, u64Src, 1);
355 IEM_MC_ARG(uint32_t *, pEFlags, 2);
356
357 IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
358 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
359 IEM_MC_REF_EFLAGS(pEFlags);
360 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
361
362 IEM_MC_ADVANCE_RIP();
363 IEM_MC_END();
364 break;
365 }
366 }
367 else
368 {
369 /*
370 * We're accessing memory.
371 */
372 switch (pVCpu->iem.s.enmEffOpSize)
373 {
374 case IEMMODE_16BIT:
375 IEM_MC_BEGIN(3, 1);
376 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
377 IEM_MC_ARG(uint16_t, u16Src, 1);
378 IEM_MC_ARG(uint32_t *, pEFlags, 2);
379 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
380
381 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
383 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
384 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
385 IEM_MC_REF_EFLAGS(pEFlags);
386 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
387
388 IEM_MC_ADVANCE_RIP();
389 IEM_MC_END();
390 break;
391
392 case IEMMODE_32BIT:
393 IEM_MC_BEGIN(3, 1);
394 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
395 IEM_MC_ARG(uint32_t, u32Src, 1);
396 IEM_MC_ARG(uint32_t *, pEFlags, 2);
397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
398
399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
401 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
402 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
403 IEM_MC_REF_EFLAGS(pEFlags);
404 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
405
406 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
407 IEM_MC_ADVANCE_RIP();
408 IEM_MC_END();
409 break;
410
411 case IEMMODE_64BIT:
412 IEM_MC_BEGIN(3, 1);
413 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
414 IEM_MC_ARG(uint64_t, u64Src, 1);
415 IEM_MC_ARG(uint32_t *, pEFlags, 2);
416 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
417
418 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
420 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
421 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
422 IEM_MC_REF_EFLAGS(pEFlags);
423 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
424
425 IEM_MC_ADVANCE_RIP();
426 IEM_MC_END();
427 break;
428 }
429 }
430 return VINF_SUCCESS;
431}
432
433
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * Fetches the Ib immediate, then applies the operation to AL and EFLAGS.
 *
 * @param pImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *, pu8Dst, 0);
    IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1);
    IEM_MC_ARG(uint32_t *, pEFlags, 2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
458
459
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * In 64-bit operand size the immediate is a sign-extended 32-bit value
 * (Iz encoding), as fetched via IEM_OPCODE_GET_NEXT_S32_SX_U64.
 *
 * @param pImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            /* TEST doesn't write the destination, so don't clear the high half. */
            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
532
533
/** Opcodes 0xf1, 0xd6.  Raises an invalid-opcode exception (#UD). */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC(Invalid, "Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
540
541
/** Invalid with an RM byte; the byte has already been fetched by the caller. */
FNIEMOPRM_DEF(iemOp_InvalidWithRM)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(InvalidWithRm, "InvalidWithRM");
    return IEMOP_RAISE_INVALID_OPCODE();
}
549
550
/** Invalid opcode where intel requires Mod R/M sequence.
 *
 * On Intel the CPU still consumes the ModR/M byte (and decodes any effective
 * address following it) before raising \#UD; other vendors raise immediately.
 */
FNIEMOP_DEF(iemOp_InvalidNeedRM)
{
    IEMOP_MNEMONIC(InvalidNeedRM, "InvalidNeedRM");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        /* Decode (and discard) the effective address so the instruction
           length is accounted for correctly. */
        RTGCPTR GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
568
569
/** Invalid opcode where intel requires Mod R/M sequence and an 8-bit
 * immediate.  (The code reads one immediate byte, so "8-byte" in the old
 * comment was wrong.) */
FNIEMOP_DEF(iemOp_InvalidNeedRMImm8)
{
    IEMOP_MNEMONIC(InvalidNeedRMImm8, "InvalidNeedRMImm8");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        /* Decode (and discard) the effective address for correct length. */
        RTGCPTR GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); RT_NOREF(bImm);
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
589
590
/** Invalid opcode where intel requires a 3rd escape byte and a Mod R/M
 * sequence. */
FNIEMOP_DEF(iemOp_InvalidNeed3ByteEscRM)
{
    IEMOP_MNEMONIC(InvalidNeed3ByteEscRM, "InvalidNeed3ByteEscRM");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        /* Consume the third escape byte, then the ModR/M (plus effective
           address) so the instruction length is right before raising #UD. */
        uint8_t b3rd; IEM_OPCODE_GET_NEXT_U8(&b3rd); RT_NOREF(b3rd);
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        RTGCPTR GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
610
611
/** Invalid opcode where intel requires a 3rd escape byte, Mod R/M sequence,
 * and an 8-bit immediate.  (The code reads one immediate byte, so "8-byte"
 * in the old comment was wrong.) */
FNIEMOP_DEF(iemOp_InvalidNeed3ByteEscRMImm8)
{
    IEMOP_MNEMONIC(InvalidNeed3ByteEscRMImm8, "InvalidNeed3ByteEscRMImm8");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t b3rd; IEM_OPCODE_GET_NEXT_U8(&b3rd); RT_NOREF(b3rd);
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        /* Decode (and discard) the effective address for correct length. */
        RTGCPTR GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); RT_NOREF(bImm);
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
632
633
634
635/** @name ..... opcodes.
636 *
637 * @{
638 */
639
640/** @} */
641
642
643/** @name Two byte opcodes (first byte 0x0f).
644 *
645 * @{
646 */
647
/** Opcode 0x0f 0x00 /0 - SLDT: store the LDT selector to a register or
 *  16-bit memory operand.  Register form honours the operand size; the
 *  memory form always stores 16 bits. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: always a 16-bit store, regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
704
705
/** Opcode 0x0f 0x00 /1 - STR: store the task register selector to a register
 *  or 16-bit memory operand.  Same structure as SLDT above. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: always a 16-bit store, regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
762
763
/** Opcode 0x0f 0x00 /2 - LLDT: load the LDT register from a 16-bit selector
 *  operand.  The heavy lifting (privilege and selector checks) is done in
 *  iemCImpl_lldt. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        /* CPL check before the memory read for the memory form. */
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
794
795
/** Opcode 0x0f 0x00 /3 - LTR: load the task register from a 16-bit selector
 *  operand; the real work happens in iemCImpl_ltr.
 *
 *  NOTE(review): unlike sldt/str/lldt this uses
 *  IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX instead of IEMOP_HLP_DECODED_NL_1 —
 *  confirm whether that is intentional. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
826
827
828/** Opcode 0x0f 0x00 /3. */
829FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
830{
831 IEMOP_HLP_MIN_286();
832 IEMOP_HLP_NO_REAL_OR_V86_MODE();
833
834 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
835 {
836 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
837 IEM_MC_BEGIN(2, 0);
838 IEM_MC_ARG(uint16_t, u16Sel, 0);
839 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
840 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
841 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
842 IEM_MC_END();
843 }
844 else
845 {
846 IEM_MC_BEGIN(2, 1);
847 IEM_MC_ARG(uint16_t, u16Sel, 0);
848 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
849 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
850 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
851 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
852 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
853 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
854 IEM_MC_END();
855 }
856 return VINF_SUCCESS;
857}
858
859
/** Opcode 0x0f 0x00 /4 - VERR: verify a segment for reading. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5 - VERW: verify a segment for writing. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}
876
877
/**
 * Group 6 jump table, indexed by the ModR/M reg field (/0 .. /7).
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,        /* /0 */
    iemOp_Grp6_str,         /* /1 */
    iemOp_Grp6_lldt,        /* /2 */
    iemOp_Grp6_ltr,         /* /3 */
    iemOp_Grp6_verr,        /* /4 */
    iemOp_Grp6_verw,        /* /5 */
    iemOp_InvalidWithRM,    /* /6 */
    iemOp_InvalidWithRM     /* /7 */
};
892
/** Opcode 0x0f 0x00 - dispatches to the group 6 handler selected by the
 *  ModR/M reg field. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
899
900
/** Opcode 0x0f 0x01 /0 - SGDT: store the GDTR to memory (handled by
 *  iemCImpl_sgdt). */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
917
918
/** Opcode 0x0f 0x01 /0 (0xc1) - VMCALL.  Unimplemented stub: raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0 (0xc2) - VMLAUNCH.  Unimplemented stub: raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0 (0xc3) - VMRESUME.  Unimplemented stub: raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0 (0xc4) - VMXOFF.  Unimplemented stub: raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
949
950
/** Opcode 0x0f 0x01 /1 - SIDT: store the IDTR to memory (handled by
 *  iemCImpl_sidt). */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
967
968
/** Opcode 0x0f 0x01 /1 (0xc8) - MONITOR.  Deferred to iemCImpl_monitor; the
 *  effective segment is passed along since the monitored address is
 *  segment-relative (presumably DS by default — confirm in iemCImpl_monitor). */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}
976
977
/** Opcode 0x0f 0x01 /1 (0xc9) - MWAIT.  Deferred to iemCImpl_mwait. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
985
986
/** Opcode 0x0f 0x01 /2 - LGDT: load the GDTR from memory (handled by
 *  iemCImpl_lgdt).
 *
 *  NOTE(review): unlike sgdt/sidt this lacks IEMOP_HLP_MIN_286() — confirm
 *  whether that is intentional. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1003
1004
/** Opcode 0x0f 0x01 0xd0 - XGETBV.  \#UD unless XSAVE/XRSTOR is exposed to
 *  the guest. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
1016
1017
/** Opcode 0x0f 0x01 0xd1 - xsetbv: #UD unless the guest CPU reports XSAVE support. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /* F2/F3 prefixes are invalid here as well as LOCK. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
1029
1030
/** Opcode 0x0f 0x01 /3 - lidt: load IDTR from the memory operand. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    /* Same effect as IEMOP_HLP_64BIT_OP_SIZE in lgdt: force 64-bit operand
       size in long mode, otherwise honor the effective operand size. */
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1049
1050
/*
 * AMD SVM instructions, 0x0f 0x01 0xd8..0xdf (group 7 /3 register forms).
 * All are #UD stubs at this revision; SVM is not implemented by IEM here.
 */

/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
1074
1075/** Opcode 0x0f 0x01 /4. */
1076FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
1077{
1078 IEMOP_MNEMONIC(smsw, "smsw");
1079 IEMOP_HLP_MIN_286();
1080 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1081 {
1082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1083 switch (pVCpu->iem.s.enmEffOpSize)
1084 {
1085 case IEMMODE_16BIT:
1086 IEM_MC_BEGIN(0, 1);
1087 IEM_MC_LOCAL(uint16_t, u16Tmp);
1088 IEM_MC_FETCH_CR0_U16(u16Tmp);
1089 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
1090 { /* likely */ }
1091 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
1092 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1093 else
1094 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1095 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
1096 IEM_MC_ADVANCE_RIP();
1097 IEM_MC_END();
1098 return VINF_SUCCESS;
1099
1100 case IEMMODE_32BIT:
1101 IEM_MC_BEGIN(0, 1);
1102 IEM_MC_LOCAL(uint32_t, u32Tmp);
1103 IEM_MC_FETCH_CR0_U32(u32Tmp);
1104 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
1105 IEM_MC_ADVANCE_RIP();
1106 IEM_MC_END();
1107 return VINF_SUCCESS;
1108
1109 case IEMMODE_64BIT:
1110 IEM_MC_BEGIN(0, 1);
1111 IEM_MC_LOCAL(uint64_t, u64Tmp);
1112 IEM_MC_FETCH_CR0_U64(u64Tmp);
1113 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
1114 IEM_MC_ADVANCE_RIP();
1115 IEM_MC_END();
1116 return VINF_SUCCESS;
1117
1118 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1119 }
1120 }
1121 else
1122 {
1123 /* Ignore operand size here, memory refs are always 16-bit. */
1124 IEM_MC_BEGIN(0, 2);
1125 IEM_MC_LOCAL(uint16_t, u16Tmp);
1126 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1127 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1129 IEM_MC_FETCH_CR0_U16(u16Tmp);
1130 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
1131 { /* likely */ }
1132 else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
1133 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1134 else
1135 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1136 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
1137 IEM_MC_ADVANCE_RIP();
1138 IEM_MC_END();
1139 return VINF_SUCCESS;
1140 }
1141}
1142
1143
/** Opcode 0x0f 0x01 /6 - lmsw: load the machine status word into CR0. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();        /* #UD on pre-286 CPUs. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: value comes straight from the 16-bit GPR. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        /* Memory source: always a 16-bit read from the effective address. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1173
1174
1175/** Opcode 0x0f 0x01 /7. */
1176FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
1177{
1178 IEMOP_MNEMONIC(invlpg, "invlpg");
1179 IEMOP_HLP_MIN_486();
1180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1181 IEM_MC_BEGIN(1, 1);
1182 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
1183 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1184 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
1185 IEM_MC_END();
1186 return VINF_SUCCESS;
1187}
1188
1189
/** Opcode 0x0f 0x01 0xf8 (/7, rm=0) - swapgs: 64-bit mode only. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();     /* #UD outside long mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
1198
1199
/** Opcode 0x0f 0x01 0xf9 (/7, rm=1) - rdtscp: not implemented yet, just complains. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pVCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1207
1208
/**
 * Opcode 0x0f 0x01 - group 7 dispatcher.
 *
 * Routes on the ModR/M reg field; for /0, /1, /2, /3 and /7 the register
 * forms (mod == 3) encode separate instructions selected by the rm field,
 * while the memory forms go to the sgdt/sidt/lgdt/lidt/invlpg workers.
 */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            /* Register forms: VMX instructions (rm 0 and 5..7 are invalid). */
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            /* All eight register rm values map to AMD SVM stubs; the default
               case asserts, so there is no fall-through out of this switch. */
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1285
/** Common worker for the LAR (0x0f 0x02) and LSL (0x0f 0x03) instructions. */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    /* LAR/LSL are protected-mode only instructions. */
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: the selector comes from a 16-bit GPR. */
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            /* 32-bit and 64-bit share the 64-bit worker; it masks/zero-extends
               as appropriate for the destination register width. */
            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory source: fetch the 16-bit selector from the effective address. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1379
1380
1381
/** Opcode 0x0f 0x02 - lar Gv,Ew: load access rights; shares the common LAR/LSL worker. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true /*fIsLar*/);
}
1388
1389
/** Opcode 0x0f 0x03 - lsl Gv,Ew: load segment limit; shares the common LAR/LSL worker. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false /*fIsLar*/);
}
1396
1397
/** Opcode 0x0f 0x05 - syscall: defers entirely to the C implementation. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}
1405
1406
/** Opcode 0x0f 0x06 - clts: clear CR0.TS; defers entirely to the C implementation. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1414
1415
/** Opcode 0x0f 0x07 - sysret: defers entirely to the C implementation. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1423
1424
/** Opcode 0x0f 0x08 - invd: still a stub; will need IEMOP_HLP_MIN_486 when implemented. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();
1429
/** Opcode 0x0f 0x09 - wbinvd: privilege-checked NOP (caches are not emulated). */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();        /* #UD on pre-486 CPUs. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();   /* WBINVD is a ring-0 instruction. */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}
1442
1443
/** Opcode 0x0f 0x0b - ud2: architecturally defined invalid opcode, always #UD. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}
1450
/** Opcode 0x0f 0x0d - AMD 3DNow! prefetch group P (prefetch/prefetchw). */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* Register forms are invalid; the instruction takes a memory operand. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* Mnemonic selection only; all reg values execute the same NOP below. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    /* Decode the address (consuming displacement bytes) but do nothing with it. */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1491
1492
/** Opcode 0x0f 0x0e - femms (3DNow!): stub. */
FNIEMOP_STUB(iemOp_femms);


/*
 * AMD 3DNow! operations, selected by the immediate suffix byte of 0x0f 0x0f
 * (see the iemOp_3Dnow dispatcher below).  All are stubs at this revision.
 */

/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1569
/**
 * Opcode 0x0f 0x0f - 3DNow! dispatcher.
 *
 * The actual operation is selected by a trailing immediate byte; raises #UD
 * when the guest CPU does not report 3DNow! support.
 */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1611
1612
/* Opcode 0x0f 0x10 variants (load forms), all stubs at this revision: */

/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x10 - vmovss Vx, Hx, Wss */
FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);
/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);
1621
1622
/** Opcode 0x0f 0x11 - vmovups Wps, Vps: store 128-bit XMM to register/memory (unaligned OK). */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Destination is rm, source is reg (store direction of 0x0f 0x10). */
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        /* Unaligned store is fine for movups; no alignment check here. */
        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1664
1665
/* Remaining 0x0f 0x11 (store) prefix variants still stubbed: */

/** Opcode 0x66 0x0f 0x11 - vmovupd Wpd,Vpd */
FNIEMOP_STUB(iemOp_vmovupd_Wpd_Vpd);

/** Opcode 0xf3 0x0f 0x11 - vmovss Wss, Hx, Vss */
FNIEMOP_STUB(iemOp_vmovss_Wss_Hx_Vss);
1671
/** Opcode 0xf2 0x0f 0x11 - vmovsd Wsd, Hx, Vsd: store low 64 bits of an XMM register. */
FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
{
    IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Only the low qword of the destination is written; upper bits are
           preserved (STORE_XREG_U64 writes qword 0 only). */
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1716
1717
/* 0x0f 0x12 family, all stubbed (NEXT = queued for implementation): */

/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps); //NEXT

/** Opcode 0x66 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovlpd_Vq_Hq_Mq); //NEXT

/** Opcode 0xf3 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovsldup_Vx_Wx); //NEXT

/** Opcode 0xf2 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx); //NEXT
1729
1730
/**
 * Opcode 0x0f 0x13 - movlps Mq,Vq / movlpd Mq,Vq (66 prefix).
 *
 * Only the 66-prefixed movlpd memory form is implemented; the plain movlps
 * path still reports not-implemented.
 */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq)
{
    /* Quick hack. Need to restructure all of this later some time. */
    if (pVCpu->iem.s.fPrefixes == IEM_OP_PRF_SIZE_OP)
    {
        IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /* The register form of movlpd is architecturally invalid; the
               #if 0 block is a kept-for-reference implementation sketch. */
#if 0
            /*
             * Register, register.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
            IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
#else
            return IEMOP_RAISE_INVALID_OPCODE();
#endif
        }
        else
        {
            /*
             * Memory, register.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        return VINF_SUCCESS;
    }

    /* Non-66h-prefixed movlps path is still unimplemented. */
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1784
1785
/* 0x0f 0x14..0x17 family (unpack and high/low moves); stubs and invalid slots: */

/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx*/
FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x14 - invalid */
/* Opcode 0xf2 0x0f 0x14 - invalid */
/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */
/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq vmovlhps Vdq, Hq, Uq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
/* Opcode 0xf2 0x0f 0x16 - invalid */
/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
/* Opcode 0xf3 0x0f 0x17 - invalid */
/* Opcode 0xf2 0x0f 0x17 - invalid */
1811
1812
/** Opcode 0x0f 0x18 - group 16 (prefetchNTA/T0/T1/T2); emulated as NOP. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Mnemonic selection only; all reg values execute the same NOP below. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        /* Decode the address (consuming displacement bytes) but do nothing with it. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /* Register forms are invalid. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1845
1846
/** Opcode 0x0f 0x19..0x1f - multi-byte NOP with a ModR/M operand (hint NOP). */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory form: the address (and its displacement bytes) must still be
           decoded even though the operand is never accessed. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1872
1873
/** Opcode 0x0f 0x20 - mov Rd,Cd: read a control register into a GPR. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    /* Operand size is fixed: 64-bit in long mode, 32-bit otherwise. */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 exist; everything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}
1905
1906
/** Opcode 0x0f 0x21 - mov Rd,Dd: read a debug register into a GPR. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REX.R would address DR8..DR15 which do not exist -> #UD. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}
1920
1921
/** Opcode 0x0f 0x22 - mov Cd,Rd: write a GPR into a control register. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    /* Operand size is fixed: 64-bit in long mode, 32-bit otherwise. */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 exist; everything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}
1953
1954
/** Opcode 0x0f 0x23 - mov Dd,Rd: write a GPR into a debug register. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REX.R would address DR8..DR15 which do not exist -> #UD. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}
1968
1969
/** Opcode 0x0f 0x24 - mov Rd,Td: test registers; #UD on supported target CPUs. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1978
1979
/** Opcode 0x0f 0x26 - mov Td,Rd: test registers; #UD on supported target CPUs. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1988
1989
/**
 * Opcode 0x0f 0x28 - movaps Vps,Wps / movapd Vpd,Wpd (66 prefix).
 *
 * Aligned 128-bit load into an XMM register; the memory form enforces
 * 16-byte alignment via the _ALIGN_SSE fetch.
 */
FNIEMOP_DEF(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd)
{
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
        IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
    else
        IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        /* movaps needs SSE, movapd (66h form) needs SSE2. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* Aligned fetch: raises #GP on a misaligned 16-byte operand. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2040
2041
/** Opcode 0x0f 0x29. */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
{
    /*
     * Aligned 128-bit store from an XMM register: movaps (no prefix, SSE) or
     * movapd (operand-size prefix, SSE2).  Same move either way; only the
     * CPUID/exception checks differ.
     */
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
        IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
    else
        IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Note the operand order: r/m (+REX.B) is the destination here. */
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /* Only reading the XMM state here, hence FOR_READ (the 0x28 load uses FOR_CHANGE). */
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2092
2093
/*
 * 0x0f 0x2a: integer-to-float conversions.  Not implemented yet (stubs);
 * the //NEXT markers flag them as next in line for implementation.
 */
/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT
2102
2103
/** Opcode 0x0f 0x2b. */
FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
{
    /*
     * Non-temporal aligned 128-bit store from an XMM register: movntps (no
     * prefix, SSE) or movntpd (operand-size prefix, SSE2).  Only the memory
     * destination encoding is valid; mod=3 raises #UD.
     */
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
        IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
    else
        IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mdq,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); /* note: despite the name, this is the store destination address */

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /* NOTE(review): only reads the XMM state; FOR_READ would seem sufficient - confirm. */
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
2140
2141
/*
 * 0x0f 0x2c-0x2f: float conversions with truncation/rounding and (un)ordered
 * compares.  All stubs for now.
 */
/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
/* NOTE(review): the sibling 0x2c/0x2d forms spell the MMX destination 'Ppi';
   the 'Qpi' in this stub name looks like a typo - confirm before renaming. */
FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
/* Opcode 0xf3 0x0f 0x2e - invalid */
/* Opcode 0xf2 0x0f 0x2e - invalid */

/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
/* Opcode 0xf3 0x0f 0x2f - invalid */
/* Opcode 0xf2 0x0f 0x2f - invalid */
2173
/** Opcode 0x0f 0x30. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    /* wrmsr - write model specific register; all the work (including privilege
       and MSR-index checks) is done in the C implementation. */
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}
2181
2182
/** Opcode 0x0f 0x31. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    /* rdtsc - read time-stamp counter; deferred entirely to the C implementation. */
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
2190
2191
/** Opcode 0x0f 0x32. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    /* rdmsr - read model specific register; all the work is done in the C
       implementation.  (The opcode comment above previously said 0x33 by
       mistake; rdmsr is 0F 32, rdpmc is 0F 33.) */
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
2199
2200
/** Opcode 0x0f 0x33. */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
/** Opcode 0x0f 0x3a. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
2213
2214
/**
 * Implements a conditional move (CMOVcc Gv,Ev).
 *
 * A memory source operand is fetched before the condition is evaluated, so it
 * can still fault even when the move does not take place.  With a 32-bit
 * operand size the high half of the 64-bit destination is cleared even when
 * the condition is false (the IEM_MC_ELSE branches below); the 16-bit form
 * leaves the destination untouched on a false condition.
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * @param a_Cnd The conditional "microcode" operation.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
2315
2316
2317
/** Opcode 0x0f 0x40 - cmovo Gv,Ev (move if OF=1). */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41 - cmovno Gv,Ev (move if OF=0). */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42 - cmovc Gv,Ev (move if CF=1). */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43 - cmovnc Gv,Ev (move if CF=0). */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x44 - cmove Gv,Ev (move if ZF=1). */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45 - cmovne Gv,Ev (move if ZF=0). */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46 - cmovbe Gv,Ev (move if CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47 - cmovnbe Gv,Ev (move if CF=0 and ZF=0). */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x48 - cmovs Gv,Ev (move if SF=1). */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49 - cmovns Gv,Ev (move if SF=0). */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a - cmovp Gv,Ev (move if PF=1). */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b - cmovnp Gv,Ev (move if PF=0). */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4c - cmovl Gv,Ev (move if SF!=OF). */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4d - cmovnl Gv,Ev (move if SF==OF). */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4e - cmovle Gv,Ev (move if ZF=1 or SF!=OF). */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4f - cmovnle Gv,Ev (move if ZF=0 and SF==OF). */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}

#undef CMOV_X
2446
/*
 * 0x0f 0x50-0x5f: packed/scalar floating point arithmetic, logic and
 * conversions.  All stubs for now; the plain '/​* ... invalid *​/' comments
 * document encodings that have no instruction assigned.
 */
/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
/* Opcode 0xf3 0x0f 0x50 - invalid */
/* Opcode 0xf2 0x0f 0x50 - invalid */

/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
/* Opcode 0x66 0x0f 0x52 - invalid */
/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
/* Opcode 0xf2 0x0f 0x52 - invalid */

/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
/* Opcode 0x66 0x0f 0x53 - invalid */
/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
/* Opcode 0xf2 0x0f 0x53 - invalid */

/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x54 - invalid */
/* Opcode 0xf2 0x0f 0x54 - invalid */

/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x55 - invalid */
/* Opcode 0xf2 0x0f 0x55 - invalid */

/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x56 - invalid */
/* Opcode 0xf2 0x0f 0x56 - invalid */

/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x57 - invalid */
/* Opcode 0xf2 0x0f 0x57 - invalid */

/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);

/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
/* Opcode 0xf2 0x0f 0x5b - invalid */

/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2575
2576
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *      pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
 * memory accessed for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Dispatch on the prefix: 0x66 selects the SSE form, no prefix the MMX
       form, and any REPZ/REPNZ combination is invalid. */
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1); /* only the low qword of the source is used */
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* 64-bit read, but with the full 128-bit alignment requirement. */
                IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (!pImpl->pfnU64)
                return IEMOP_RAISE_INVALID_OPCODE(); /* no MMX form for this instruction */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint32_t const *, pSrc, 1); /* only the low dword of the source is used */
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint32_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2687
2688
/** Opcode 0x0f 0x60 - interleave the low bytes of the two operands. */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC(punpcklbw, "punpcklbw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}


/** Opcode 0x0f 0x61 - interleave the low words of the two operands. */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC(punpcklwd, "punpcklwd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}


/** Opcode 0x0f 0x62 - interleave the low dwords of the two operands. */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq)
{
    IEMOP_MNEMONIC(punpckldq, "punpckldq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}
2711
2712
/*
 * 0x0f 0x63-0x67: packed saturate/compare/pack instructions.  Stubs for now.
 */
/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x63 - invalid */

/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x64 - invalid */

/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x65 - invalid */

/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x66 - invalid */

/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, Wx (the stub identifier dropped the trailing 'x') */
FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0x67 - invalid */
2742
2743
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *      pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
 * where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Dispatch on the prefix: 0x66 selects the SSE form, no prefix the MMX
       form, and any REPZ/REPNZ combination is invalid. */
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */

                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (!pImpl->pfnU64)
                return IEMOP_RAISE_INVALID_OPCODE(); /* no MMX form for this instruction */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2854
2855
/** Opcode 0x0f 0x68 - interleave the high bytes of the two operands. */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC(punpckhbw, "punpckhbw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}


/** Opcode 0x0f 0x69 - interleave the high words of the two operands. */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC(punpckhwd, "punpckhwd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}


/** Opcode 0x0f 0x6a - interleave the high dwords of the two operands. */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC(punpckhdq, "punpckhdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}

/** Opcode 0x0f 0x6b. */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);


/** Opcode 0x0f 0x6c - interleave the low qwords (SSE2-only encoding; the
 *  common worker rejects the prefix-less MMX form when no U64 impl exists). */
FNIEMOP_DEF(iemOp_punpcklqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}


/** Opcode 0x0f 0x6d - interleave the high qwords (SSE2-only encoding, see 0x6c). */
FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC(punpckhqdq, "punpckhqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}
2897
2898
/** Opcode 0x0f 0x6e.
 *
 * movd/movq from a general register or memory into an MMX (no prefix) or
 * XMM (0x66 prefix) register.  REX.W selects the 64-bit (movq) form.
 * XMM stores zero-extend the value to the full 128 bits.
 */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            /* NOTE(review): the mnemonic strings name the destination 'W', but the
               destination of 66 0F 6E is the XMM register operand (V) - verify. */
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                IEMOP_MNEMONIC(movdq_Wq_Eq, "movq Wq,Eq");
            else
                IEMOP_MNEMONIC(movdq_Wd_Ed, "movd Wd,Ed");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* XMM, greg*/
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    /* movq: 64-bit GPR -> low qword of XMM, upper qword zeroed. */
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
                }
                else
                {
                    /* movd: 32-bit GPR -> low dword of XMM, rest zeroed. */
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* XMM, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
            else
                IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* MMX, greg */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                else
                    /* movd: 32-bit GPR zero-extended into the 64-bit MMX register. */
                    IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* MMX, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            /* F3/F2 prefixed forms are not defined for 0F 6E. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3009
3010
/** Opcode 0x0f 0x6f.
 *
 * Loads: movq Pq,Qq (MMX, no prefix), movdqa Vdq,Wdq (0x66, aligned) and
 * movdqu Vdq,Wdq (0xf3, unaligned).
 */
FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
{
    bool fAligned = false;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - aligned and unaligned forms share the decode below. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
            else
                IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 0);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                                      (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                /* movdqa enforces 16-byte alignment; movdqu does not. */
                if (fAligned)
                    IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                else
                    IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            /* F2 prefixed form is not defined for 0F 6F. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3107
3108
/** Opcode 0x0f 0x70. The immediate here is evil!
 *
 * pshufw (MMX ext), pshufd (0x66), pshuflw (0xf2) and pshufhw (0xf3).
 * "Evil" because the Ib immediate follows the ModR/M displacement, so for the
 * memory forms it must be fetched AFTER the effective address calculation.
 */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
        case IEM_OP_PRF_REPNZ: /* SSE */
        case IEM_OP_PRF_REPZ: /* SSE */
        {
            /* Pick the worker matching the prefix; decode is shared. */
            PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
            {
                case IEM_OP_PRF_SIZE_OP:
                    IEMOP_MNEMONIC(pshufd_Vdq_Wdq, "pshufd Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufd;
                    break;
                case IEM_OP_PRF_REPNZ:
                    IEMOP_MNEMONIC(pshuflw_Vdq_Wdq, "pshuflw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshuflw;
                    break;
                case IEM_OP_PRF_REPZ:
                    IEMOP_MNEMONIC(pshufhw_Vdq_Wdq, "pshufhw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufhw;
                    break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                /* The immediate is read after the effective address (it follows
                   any displacement bytes in the instruction stream). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case 0: /* MMX Extension */
            IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                /* Immediate comes after the ModR/M displacement, see above. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

                IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3237
3238
/*
 * Group 12 (0x0f 0x71) workers - packed word shifts by immediate.
 * All are still stubs; iemOp_Grp12 below dispatches to them.
 */

/** Opcode 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
3256
3257
3258/** Opcode 0x0f 0x71. */
3259FNIEMOP_DEF(iemOp_Grp12)
3260{
3261 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3262 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3263 return IEMOP_RAISE_INVALID_OPCODE();
3264 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3265 {
3266 case 0: case 1: case 3: case 5: case 7:
3267 return IEMOP_RAISE_INVALID_OPCODE();
3268 case 2:
3269 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3270 {
3271 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
3272 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
3273 default: return IEMOP_RAISE_INVALID_OPCODE();
3274 }
3275 case 4:
3276 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3277 {
3278 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
3279 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
3280 default: return IEMOP_RAISE_INVALID_OPCODE();
3281 }
3282 case 6:
3283 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3284 {
3285 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
3286 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
3287 default: return IEMOP_RAISE_INVALID_OPCODE();
3288 }
3289 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3290 }
3291}
3292
3293
/*
 * Group 13 (0x0f 0x72) workers - packed doubleword shifts by immediate.
 * All are still stubs; iemOp_Grp13 below dispatches to them.
 */

/** Opcode 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
3311
3312
3313/** Opcode 0x0f 0x72. */
3314FNIEMOP_DEF(iemOp_Grp13)
3315{
3316 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3317 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3318 return IEMOP_RAISE_INVALID_OPCODE();
3319 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3320 {
3321 case 0: case 1: case 3: case 5: case 7:
3322 return IEMOP_RAISE_INVALID_OPCODE();
3323 case 2:
3324 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3325 {
3326 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
3327 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
3328 default: return IEMOP_RAISE_INVALID_OPCODE();
3329 }
3330 case 4:
3331 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3332 {
3333 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
3334 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
3335 default: return IEMOP_RAISE_INVALID_OPCODE();
3336 }
3337 case 6:
3338 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3339 {
3340 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
3341 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
3342 default: return IEMOP_RAISE_INVALID_OPCODE();
3343 }
3344 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3345 }
3346}
3347
3348
/*
 * Group 14 (0x0f 0x73) workers - packed quadword / whole-register shifts by
 * immediate.  All are still stubs; iemOp_Grp14 below dispatches to them.
 * Note that /3 (psrldq) and /7 (pslldq) only exist with the 0x66 prefix.
 */

/** Opcode 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT

/** Opcode 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
3366
3367
3368/** Opcode 0x0f 0x73. */
3369FNIEMOP_DEF(iemOp_Grp14)
3370{
3371 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3372 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3373 return IEMOP_RAISE_INVALID_OPCODE();
3374 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3375 {
3376 case 0: case 1: case 4: case 5:
3377 return IEMOP_RAISE_INVALID_OPCODE();
3378 case 2:
3379 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3380 {
3381 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
3382 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
3383 default: return IEMOP_RAISE_INVALID_OPCODE();
3384 }
3385 case 3:
3386 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3387 {
3388 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
3389 default: return IEMOP_RAISE_INVALID_OPCODE();
3390 }
3391 case 6:
3392 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3393 {
3394 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
3395 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
3396 default: return IEMOP_RAISE_INVALID_OPCODE();
3397 }
3398 case 7:
3399 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3400 {
3401 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
3402 default: return IEMOP_RAISE_INVALID_OPCODE();
3403 }
3404 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3405 }
3406}
3407
3408
3409/**
3410 * Common worker for SSE2 and MMX instructions on the forms:
3411 * pxxx mm1, mm2/mem64
3412 * pxxx xmm1, xmm2/mem128
3413 *
3414 * Proper alignment of the 128-bit operand is enforced.
3415 * Exceptions type 4. SSE2 and MMX cpuid checks.
3416 */
3417FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3418{
3419 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3420 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3421 {
3422 case IEM_OP_PRF_SIZE_OP: /* SSE */
3423 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3424 {
3425 /*
3426 * Register, register.
3427 */
3428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3429 IEM_MC_BEGIN(2, 0);
3430 IEM_MC_ARG(uint128_t *, pDst, 0);
3431 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3432 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3433 IEM_MC_PREPARE_SSE_USAGE();
3434 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3435 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3436 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3437 IEM_MC_ADVANCE_RIP();
3438 IEM_MC_END();
3439 }
3440 else
3441 {
3442 /*
3443 * Register, memory.
3444 */
3445 IEM_MC_BEGIN(2, 2);
3446 IEM_MC_ARG(uint128_t *, pDst, 0);
3447 IEM_MC_LOCAL(uint128_t, uSrc);
3448 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3449 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3450
3451 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3453 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3454 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3455
3456 IEM_MC_PREPARE_SSE_USAGE();
3457 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3458 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3459
3460 IEM_MC_ADVANCE_RIP();
3461 IEM_MC_END();
3462 }
3463 return VINF_SUCCESS;
3464
3465 case 0: /* MMX */
3466 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3467 {
3468 /*
3469 * Register, register.
3470 */
3471 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3472 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3474 IEM_MC_BEGIN(2, 0);
3475 IEM_MC_ARG(uint64_t *, pDst, 0);
3476 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3477 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3478 IEM_MC_PREPARE_FPU_USAGE();
3479 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3480 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3481 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3482 IEM_MC_ADVANCE_RIP();
3483 IEM_MC_END();
3484 }
3485 else
3486 {
3487 /*
3488 * Register, memory.
3489 */
3490 IEM_MC_BEGIN(2, 2);
3491 IEM_MC_ARG(uint64_t *, pDst, 0);
3492 IEM_MC_LOCAL(uint64_t, uSrc);
3493 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3494 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3495
3496 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3498 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3499 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3500
3501 IEM_MC_PREPARE_FPU_USAGE();
3502 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3503 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3504
3505 IEM_MC_ADVANCE_RIP();
3506 IEM_MC_END();
3507 }
3508 return VINF_SUCCESS;
3509
3510 default:
3511 return IEMOP_RAISE_INVALID_OPCODE();
3512 }
3513}
3514
3515
/** Opcode 0x0f 0x74. */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
{
    /* Packed compare-equal on bytes; MMX/SSE2 decode shared with the worker. */
    IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}
3522
3523
/** Opcode 0x0f 0x75. */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
{
    /* Packed compare-equal on words; MMX/SSE2 decode shared with the worker. */
    IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}
3530
3531
/** Opcode 0x0f 0x76.
 * @note The function identifier says "pcmped" (typo for pcmpeqd); it cannot
 *       be renamed here as the opcode dispatch table references it. */
FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
{
    /* Packed compare-equal on doublewords; MMX/SSE2 decode shared with the worker. */
    IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}
3538
3539
/** Opcode 0x0f 0x77 - emms, vzeroupper and vzeroall (VEX forms); stub. */
FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
/* Opcode 0x66 0x0f 0x77 - invalid */
/* Opcode 0xf3 0x0f 0x77 - invalid */
/* Opcode 0xf2 0x0f 0x77 - invalid */

/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
FNIEMOP_STUB(iemOp_AmdGrp17);
/* Opcode 0xf3 0x0f 0x78 - invalid */
/* Opcode 0xf2 0x0f 0x78 - invalid */

/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
/* Opcode 0x66 0x0f 0x79 - invalid */
/* Opcode 0xf3 0x0f 0x79 - invalid */
/* Opcode 0xf2 0x0f 0x79 - invalid */

/* Opcode 0x0f 0x7a - invalid */
/* Opcode 0x66 0x0f 0x7a - invalid */
/* Opcode 0xf3 0x0f 0x7a - invalid */
/* Opcode 0xf2 0x0f 0x7a - invalid */

/* Opcode 0x0f 0x7b - invalid */
/* Opcode 0x66 0x0f 0x7b - invalid */
/* Opcode 0xf3 0x0f 0x7b - invalid */
/* Opcode 0xf2 0x0f 0x7b - invalid */

/* Opcode 0x0f 0x7c - invalid */
/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x7c - invalid */
/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);

/* Opcode 0x0f 0x7d - invalid */
/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x7d - invalid */
/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3582
3583
/** Opcode 0x0f 0x7e.
 *
 * movd/movq from an XMM (0x66 prefix) or MMX (no prefix) register to a
 * general register or memory.  REX.W selects the 64-bit (movq) form.
 *
 * @note The function name mentions movq Vq,Wq (the F3-prefixed form), but the
 *       F3 case falls into the invalid-opcode default below - verify whether
 *       that form is intentionally unimplemented here.
 */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
            else
                IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, XMM */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], XMM */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                    IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                    IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
            else
                IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, MMX */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], MMX */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3700
3701
/** Opcode 0x0f 0x7f.
 *
 * Stores: movq Qq,Pq (MMX, no prefix), movdqa Wdq,Vdq (0x66, aligned) and
 * movdqu Wdq,Vdq (0xf3, unaligned).
 */
FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    bool fAligned = false;
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - aligned and unaligned forms share the decode below. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wdq,Vdq");
            else
                IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wdq,Vdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 0);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                /* Note: destination is the r/m encoded register here (store direction). */
                IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                                      ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Memory, register (store; the local is named GCPtrEffSrc but
                 * holds the destination address).
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                /* movdqa enforces 16-byte alignment; movdqu does not. */
                if (fAligned)
                    IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
                else
                    IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");

            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Memory, register (store).
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            /* F2 prefixed form is not defined for 0F 7F. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3801
3802
3803
/** Opcode 0x0f 0x80.
 *
 * jo Jv - near relative jump taken when EFLAGS.OF is set.
 * 386+ only; 16- or 32-bit displacement depending on effective operand size
 * (64-bit mode defaults to the wide form).
 */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC(jo_Jv, "jo Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3838
3839
/** Opcode 0x0f 0x81.
 *
 * jno Jv - near relative jump taken when EFLAGS.OF is clear (the branch
 * arms are inverted relative to jo above).
 */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC(jno_Jv, "jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3874
3875
/** Opcode 0x0f 0x82.
 *
 * jc/jb/jnae Jv - near relative jump taken when EFLAGS.CF is set.
 */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3910
3911
/** Opcode 0x0f 0x83. Near conditional jump: jnc/jnb/jae Jv -- taken when CF=0. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with rel16/rel32 displacement requires a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted test: fall through when CF is set, jump when it is clear. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (and 64-bit) operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3946
3947
/** Opcode 0x0f 0x84. Near conditional jump: je/jz Jv -- taken when ZF=1. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with rel16/rel32 displacement requires a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (and 64-bit) operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3982
3983
/** Opcode 0x0f 0x85. Near conditional jump: jne/jnz Jv -- taken when ZF=0. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with rel16/rel32 displacement requires a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted test: fall through when ZF is set, jump when it is clear. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (and 64-bit) operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4018
4019
/** Opcode 0x0f 0x86. Near conditional jump: jbe/jna Jv -- taken when CF=1 or ZF=1. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with rel16/rel32 displacement requires a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Jump when either CF or ZF is set (unsigned below-or-equal). */
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (and 64-bit) operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4054
4055
/** Opcode 0x0f 0x87. Near conditional jump: jnbe/ja Jv -- taken when CF=0 and ZF=0. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with rel16/rel32 displacement requires a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted test: fall through when CF or ZF is set, jump when both are clear. */
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (and 64-bit) operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4090
4091
/** Opcode 0x0f 0x88. Near conditional jump: js Jv -- taken when SF=1. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC(js_Jv, "js Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with rel16/rel32 displacement requires a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (and 64-bit) operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4126
4127
/** Opcode 0x0f 0x89. Near conditional jump: jns Jv -- taken when SF=0. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC(jns_Jv, "jns Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with rel16/rel32 displacement requires a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted test: fall through when SF is set, jump when it is clear. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (and 64-bit) operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4162
4163
/** Opcode 0x0f 0x8a. Near conditional jump: jp Jv -- taken when PF=1. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC(jp_Jv, "jp Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with rel16/rel32 displacement requires a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (and 64-bit) operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4198
4199
/** Opcode 0x0f 0x8b. Near conditional jump: jnp Jv -- taken when PF=0. */
FNIEMOP_DEF(iemOp_jnp_Jv)
{
    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with rel16/rel32 displacement requires a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted test: fall through when PF is set, jump when it is clear. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (and 64-bit) operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4234
4235
/** Opcode 0x0f 0x8c. Near conditional jump: jl/jnge Jv -- taken when SF != OF (signed less). */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with rel16/rel32 displacement requires a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Jump when SF and OF differ. */
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (and 64-bit) operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4270
4271
/** Opcode 0x0f 0x8d. Near conditional jump: jnl/jge Jv -- taken when SF == OF (signed greater-or-equal). */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with rel16/rel32 displacement requires a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted test: fall through when SF and OF differ, jump when they are equal. */
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (and 64-bit) operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4306
4307
/** Opcode 0x0f 0x8e. Near conditional jump: jle/jng Jv -- taken when ZF=1 or SF != OF (signed less-or-equal). */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with rel16/rel32 displacement requires a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Jump when ZF is set or SF and OF differ. */
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (and 64-bit) operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4342
4343
/** Opcode 0x0f 0x8f. Near conditional jump: jnle/jg Jv -- taken when ZF=0 and SF == OF (signed greater). */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with rel16/rel32 displacement requires a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted test: fall through when ZF is set or SF != OF, jump otherwise. */
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (and 64-bit) operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4378
4379
/** Opcode 0x0f 0x90. seto Eb -- store 1 in the byte operand when OF=1, else 0. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
    IEMOP_HLP_MIN_386();    /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4420
4421
/** Opcode 0x0f 0x91. setno Eb -- store 1 in the byte operand when OF=0, else 0. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC(setno_Eb, "setno Eb");
    IEMOP_HLP_MIN_386();    /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        /* Inverted sense: store 0 when OF is set, 1 when it is clear. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4462
4463
/** Opcode 0x0f 0x92. setc Eb -- store 1 in the byte operand when CF=1, else 0. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC(setc_Eb, "setc Eb");
    IEMOP_HLP_MIN_386();    /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4504
4505
/** Opcode 0x0f 0x93. setnc Eb -- store 1 in the byte operand when CF=0, else 0. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
    IEMOP_HLP_MIN_386();    /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        /* Inverted sense: store 0 when CF is set, 1 when it is clear. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4546
4547
/** Opcode 0x0f 0x94. sete Eb -- store 1 in the byte operand when ZF=1, else 0. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC(sete_Eb, "sete Eb");
    IEMOP_HLP_MIN_386();    /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4588
4589
/** Opcode 0x0f 0x95. setne Eb -- store 1 in the byte operand when ZF=0, else 0. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC(setne_Eb, "setne Eb");
    IEMOP_HLP_MIN_386();    /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        /* Inverted sense: store 0 when ZF is set, 1 when it is clear. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4630
4631
/** Opcode 0x0f 0x96. setbe Eb -- store 1 in the byte operand when CF=1 or ZF=1, else 0. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
    IEMOP_HLP_MIN_386();    /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4672
4673
/** Opcode 0x0f 0x97. setnbe Eb -- store 1 in the byte operand when CF=0 and ZF=0, else 0. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
    IEMOP_HLP_MIN_386();    /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        /* Inverted sense: store 0 when CF or ZF is set, 1 when both are clear. */
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4714
4715
/** Opcode 0x0f 0x98. sets Eb -- store 1 in the byte operand when SF=1, else 0. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC(sets_Eb, "sets Eb");
    IEMOP_HLP_MIN_386();    /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4756
4757
/** Opcode 0x0f 0x99. setns Eb -- store 1 in the byte operand when SF=0, else 0. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC(setns_Eb, "setns Eb");
    IEMOP_HLP_MIN_386();    /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        /* Inverted sense: store 0 when SF is set, 1 when it is clear. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4798
4799
/** Opcode 0x0f 0x9a. setp Eb -- store 1 in the byte operand when PF=1, else 0. */
FNIEMOP_DEF(iemOp_setp_Eb)
{
    IEMOP_MNEMONIC(setp_Eb, "setp Eb");
    IEMOP_HLP_MIN_386();    /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4840
4841
/** Opcode 0x0f 0x9b. setnp Eb -- store 1 in the byte operand when PF=0, else 0. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
    IEMOP_HLP_MIN_386();    /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        /* Inverted sense: store 0 when PF is set, 1 when it is clear. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4882
4883
/** Opcode 0x0f 0x9c. setl Eb -- store 1 in the byte operand when SF != OF (signed less), else 0. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC(setl_Eb, "setl Eb");
    IEMOP_HLP_MIN_386();    /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4924
4925
/** Opcode 0x0f 0x9d. setnl Eb -- store 1 in the byte operand when SF == OF (signed greater-or-equal), else 0. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
    IEMOP_HLP_MIN_386();    /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        /* Inverted sense: store 0 when SF and OF differ, 1 when they are equal. */
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4966
4967
/** Opcode 0x0f 0x9e. setle Eb -- store 1 in the byte operand when ZF=1 or SF != OF (signed less-or-equal), else 0. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC(setle_Eb, "setle Eb");
    IEMOP_HLP_MIN_386();    /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5008
5009
/** Opcode 0x0f 0x9f. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    /* SETNLE/SETG Eb: inverse of SETLE - byte destination = 0 when ZF == 1 or SF != OF, else 1. */
    IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5050
5051
5052/**
5053 * Common 'push segment-register' helper.
5054 */
5055FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5056{
5057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5058 if (iReg < X86_SREG_FS)
5059 IEMOP_HLP_NO_64BIT();
5060 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5061
5062 switch (pVCpu->iem.s.enmEffOpSize)
5063 {
5064 case IEMMODE_16BIT:
5065 IEM_MC_BEGIN(0, 1);
5066 IEM_MC_LOCAL(uint16_t, u16Value);
5067 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5068 IEM_MC_PUSH_U16(u16Value);
5069 IEM_MC_ADVANCE_RIP();
5070 IEM_MC_END();
5071 break;
5072
5073 case IEMMODE_32BIT:
5074 IEM_MC_BEGIN(0, 1);
5075 IEM_MC_LOCAL(uint32_t, u32Value);
5076 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
5077 IEM_MC_PUSH_U32_SREG(u32Value);
5078 IEM_MC_ADVANCE_RIP();
5079 IEM_MC_END();
5080 break;
5081
5082 case IEMMODE_64BIT:
5083 IEM_MC_BEGIN(0, 1);
5084 IEM_MC_LOCAL(uint64_t, u64Value);
5085 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5086 IEM_MC_PUSH_U64(u64Value);
5087 IEM_MC_ADVANCE_RIP();
5088 IEM_MC_END();
5089 break;
5090 }
5091
5092 return VINF_SUCCESS;
5093}
5094
5095
/** Opcode 0x0f 0xa0. */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC(push_fs, "push fs");
    IEMOP_HLP_MIN_386();
    /* NOTE(review): iemOpCommonPushSReg also invokes this decoding check;
       calling it twice looks redundant but should be harmless - confirm. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
5104
5105
/** Opcode 0x0f 0xa1. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    /* POP FS - deferred to the C implementation (segment loads can fault/reload). */
    IEMOP_MNEMONIC(pop_fs, "pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
}
5114
5115
/** Opcode 0x0f 0xa2. */
FNIEMOP_DEF(iemOp_cpuid)
{
    /* CPUID - fully handled by the C implementation. */
    IEMOP_MNEMONIC(cpuid, "cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
5124
5125
5126/**
5127 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5128 * iemOp_bts_Ev_Gv.
5129 */
5130FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5131{
5132 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5133 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5134
5135 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5136 {
5137 /* register destination. */
5138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5139 switch (pVCpu->iem.s.enmEffOpSize)
5140 {
5141 case IEMMODE_16BIT:
5142 IEM_MC_BEGIN(3, 0);
5143 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5144 IEM_MC_ARG(uint16_t, u16Src, 1);
5145 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5146
5147 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5148 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5149 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5150 IEM_MC_REF_EFLAGS(pEFlags);
5151 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5152
5153 IEM_MC_ADVANCE_RIP();
5154 IEM_MC_END();
5155 return VINF_SUCCESS;
5156
5157 case IEMMODE_32BIT:
5158 IEM_MC_BEGIN(3, 0);
5159 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5160 IEM_MC_ARG(uint32_t, u32Src, 1);
5161 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5162
5163 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5164 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5165 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5166 IEM_MC_REF_EFLAGS(pEFlags);
5167 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5168
5169 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5170 IEM_MC_ADVANCE_RIP();
5171 IEM_MC_END();
5172 return VINF_SUCCESS;
5173
5174 case IEMMODE_64BIT:
5175 IEM_MC_BEGIN(3, 0);
5176 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5177 IEM_MC_ARG(uint64_t, u64Src, 1);
5178 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5179
5180 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5181 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5182 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5183 IEM_MC_REF_EFLAGS(pEFlags);
5184 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5185
5186 IEM_MC_ADVANCE_RIP();
5187 IEM_MC_END();
5188 return VINF_SUCCESS;
5189
5190 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5191 }
5192 }
5193 else
5194 {
5195 /* memory destination. */
5196
5197 uint32_t fAccess;
5198 if (pImpl->pfnLockedU16)
5199 fAccess = IEM_ACCESS_DATA_RW;
5200 else /* BT */
5201 fAccess = IEM_ACCESS_DATA_R;
5202
5203 /** @todo test negative bit offsets! */
5204 switch (pVCpu->iem.s.enmEffOpSize)
5205 {
5206 case IEMMODE_16BIT:
5207 IEM_MC_BEGIN(3, 2);
5208 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5209 IEM_MC_ARG(uint16_t, u16Src, 1);
5210 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5211 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5212 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5213
5214 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5215 if (pImpl->pfnLockedU16)
5216 IEMOP_HLP_DONE_DECODING();
5217 else
5218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5219 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5220 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5221 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5222 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5223 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 1);
5224 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5225 IEM_MC_FETCH_EFLAGS(EFlags);
5226
5227 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5228 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5229 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5230 else
5231 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5232 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5233
5234 IEM_MC_COMMIT_EFLAGS(EFlags);
5235 IEM_MC_ADVANCE_RIP();
5236 IEM_MC_END();
5237 return VINF_SUCCESS;
5238
5239 case IEMMODE_32BIT:
5240 IEM_MC_BEGIN(3, 2);
5241 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5242 IEM_MC_ARG(uint32_t, u32Src, 1);
5243 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5244 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5245 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5246
5247 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5248 if (pImpl->pfnLockedU16)
5249 IEMOP_HLP_DONE_DECODING();
5250 else
5251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5252 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5253 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5254 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5255 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5256 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5257 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5258 IEM_MC_FETCH_EFLAGS(EFlags);
5259
5260 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5261 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5262 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5263 else
5264 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5265 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5266
5267 IEM_MC_COMMIT_EFLAGS(EFlags);
5268 IEM_MC_ADVANCE_RIP();
5269 IEM_MC_END();
5270 return VINF_SUCCESS;
5271
5272 case IEMMODE_64BIT:
5273 IEM_MC_BEGIN(3, 2);
5274 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5275 IEM_MC_ARG(uint64_t, u64Src, 1);
5276 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5277 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5278 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5279
5280 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5281 if (pImpl->pfnLockedU16)
5282 IEMOP_HLP_DONE_DECODING();
5283 else
5284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5285 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5286 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5287 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5288 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5289 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5290 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5291 IEM_MC_FETCH_EFLAGS(EFlags);
5292
5293 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5294 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5295 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5296 else
5297 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5298 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5299
5300 IEM_MC_COMMIT_EFLAGS(EFlags);
5301 IEM_MC_ADVANCE_RIP();
5302 IEM_MC_END();
5303 return VINF_SUCCESS;
5304
5305 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5306 }
5307 }
5308}
5309
5310
/** Opcode 0x0f 0xa3. */
FNIEMOP_DEF(iemOp_bt_Ev_Gv)
{
    /* BT Ev,Gv - read-only bit test; the worker selects R access via the NULL locked impl. */
    IEMOP_MNEMONIC(bt_Ev_Gv, "bt  Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
}
5318
5319
5320/**
5321 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5322 */
5323FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5324{
5325 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5326 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5327
5328 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5329 {
5330 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5331 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5332
5333 switch (pVCpu->iem.s.enmEffOpSize)
5334 {
5335 case IEMMODE_16BIT:
5336 IEM_MC_BEGIN(4, 0);
5337 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5338 IEM_MC_ARG(uint16_t, u16Src, 1);
5339 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5340 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5341
5342 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5343 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5344 IEM_MC_REF_EFLAGS(pEFlags);
5345 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5346
5347 IEM_MC_ADVANCE_RIP();
5348 IEM_MC_END();
5349 return VINF_SUCCESS;
5350
5351 case IEMMODE_32BIT:
5352 IEM_MC_BEGIN(4, 0);
5353 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5354 IEM_MC_ARG(uint32_t, u32Src, 1);
5355 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5356 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5357
5358 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5359 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5360 IEM_MC_REF_EFLAGS(pEFlags);
5361 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5362
5363 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5364 IEM_MC_ADVANCE_RIP();
5365 IEM_MC_END();
5366 return VINF_SUCCESS;
5367
5368 case IEMMODE_64BIT:
5369 IEM_MC_BEGIN(4, 0);
5370 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5371 IEM_MC_ARG(uint64_t, u64Src, 1);
5372 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5373 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5374
5375 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5376 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5377 IEM_MC_REF_EFLAGS(pEFlags);
5378 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5379
5380 IEM_MC_ADVANCE_RIP();
5381 IEM_MC_END();
5382 return VINF_SUCCESS;
5383
5384 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5385 }
5386 }
5387 else
5388 {
5389 switch (pVCpu->iem.s.enmEffOpSize)
5390 {
5391 case IEMMODE_16BIT:
5392 IEM_MC_BEGIN(4, 2);
5393 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5394 IEM_MC_ARG(uint16_t, u16Src, 1);
5395 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5396 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5398
5399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5400 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5401 IEM_MC_ASSIGN(cShiftArg, cShift);
5402 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5403 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5404 IEM_MC_FETCH_EFLAGS(EFlags);
5405 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5406 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5407
5408 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5409 IEM_MC_COMMIT_EFLAGS(EFlags);
5410 IEM_MC_ADVANCE_RIP();
5411 IEM_MC_END();
5412 return VINF_SUCCESS;
5413
5414 case IEMMODE_32BIT:
5415 IEM_MC_BEGIN(4, 2);
5416 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5417 IEM_MC_ARG(uint32_t, u32Src, 1);
5418 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5419 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5420 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5421
5422 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5423 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5424 IEM_MC_ASSIGN(cShiftArg, cShift);
5425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5426 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5427 IEM_MC_FETCH_EFLAGS(EFlags);
5428 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5429 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5430
5431 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5432 IEM_MC_COMMIT_EFLAGS(EFlags);
5433 IEM_MC_ADVANCE_RIP();
5434 IEM_MC_END();
5435 return VINF_SUCCESS;
5436
5437 case IEMMODE_64BIT:
5438 IEM_MC_BEGIN(4, 2);
5439 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5440 IEM_MC_ARG(uint64_t, u64Src, 1);
5441 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5442 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5443 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5444
5445 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5446 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5447 IEM_MC_ASSIGN(cShiftArg, cShift);
5448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5449 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5450 IEM_MC_FETCH_EFLAGS(EFlags);
5451 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5452 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5453
5454 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5455 IEM_MC_COMMIT_EFLAGS(EFlags);
5456 IEM_MC_ADVANCE_RIP();
5457 IEM_MC_END();
5458 return VINF_SUCCESS;
5459
5460 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5461 }
5462 }
5463}
5464
5465
5466/**
5467 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5468 */
5469FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5470{
5471 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5472 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5473
5474 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5475 {
5476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5477
5478 switch (pVCpu->iem.s.enmEffOpSize)
5479 {
5480 case IEMMODE_16BIT:
5481 IEM_MC_BEGIN(4, 0);
5482 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5483 IEM_MC_ARG(uint16_t, u16Src, 1);
5484 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5485 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5486
5487 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5488 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5489 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5490 IEM_MC_REF_EFLAGS(pEFlags);
5491 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5492
5493 IEM_MC_ADVANCE_RIP();
5494 IEM_MC_END();
5495 return VINF_SUCCESS;
5496
5497 case IEMMODE_32BIT:
5498 IEM_MC_BEGIN(4, 0);
5499 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5500 IEM_MC_ARG(uint32_t, u32Src, 1);
5501 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5502 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5503
5504 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5505 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5506 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5507 IEM_MC_REF_EFLAGS(pEFlags);
5508 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5509
5510 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5511 IEM_MC_ADVANCE_RIP();
5512 IEM_MC_END();
5513 return VINF_SUCCESS;
5514
5515 case IEMMODE_64BIT:
5516 IEM_MC_BEGIN(4, 0);
5517 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5518 IEM_MC_ARG(uint64_t, u64Src, 1);
5519 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5520 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5521
5522 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5523 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5524 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5525 IEM_MC_REF_EFLAGS(pEFlags);
5526 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5527
5528 IEM_MC_ADVANCE_RIP();
5529 IEM_MC_END();
5530 return VINF_SUCCESS;
5531
5532 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5533 }
5534 }
5535 else
5536 {
5537 switch (pVCpu->iem.s.enmEffOpSize)
5538 {
5539 case IEMMODE_16BIT:
5540 IEM_MC_BEGIN(4, 2);
5541 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5542 IEM_MC_ARG(uint16_t, u16Src, 1);
5543 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5544 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5545 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5546
5547 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5549 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5550 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5551 IEM_MC_FETCH_EFLAGS(EFlags);
5552 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5553 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5554
5555 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5556 IEM_MC_COMMIT_EFLAGS(EFlags);
5557 IEM_MC_ADVANCE_RIP();
5558 IEM_MC_END();
5559 return VINF_SUCCESS;
5560
5561 case IEMMODE_32BIT:
5562 IEM_MC_BEGIN(4, 2);
5563 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5564 IEM_MC_ARG(uint32_t, u32Src, 1);
5565 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5566 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5567 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5568
5569 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5571 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5572 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5573 IEM_MC_FETCH_EFLAGS(EFlags);
5574 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5575 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5576
5577 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5578 IEM_MC_COMMIT_EFLAGS(EFlags);
5579 IEM_MC_ADVANCE_RIP();
5580 IEM_MC_END();
5581 return VINF_SUCCESS;
5582
5583 case IEMMODE_64BIT:
5584 IEM_MC_BEGIN(4, 2);
5585 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5586 IEM_MC_ARG(uint64_t, u64Src, 1);
5587 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5588 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5589 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5590
5591 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5593 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5594 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5595 IEM_MC_FETCH_EFLAGS(EFlags);
5596 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5597 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5598
5599 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5600 IEM_MC_COMMIT_EFLAGS(EFlags);
5601 IEM_MC_ADVANCE_RIP();
5602 IEM_MC_END();
5603 return VINF_SUCCESS;
5604
5605 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5606 }
5607 }
5608}
5609
5610
5611
/** Opcode 0x0f 0xa4. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    /* SHLD Ev,Gv,Ib - shift-left-double with immediate count. */
    IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}
5619
5620
/** Opcode 0x0f 0xa5. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    /* SHLD Ev,Gv,CL - shift-left-double with count in CL. */
    IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}
5628
5629
/** Opcode 0x0f 0xa8. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC(push_gs, "push gs");
    IEMOP_HLP_MIN_386();
    /* NOTE(review): the common worker repeats this decoding check; redundant
       but should be harmless - confirm. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
5638
5639
/** Opcode 0x0f 0xa9. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    /* POP GS - deferred to the C implementation (segment loads can fault/reload). */
    IEMOP_MNEMONIC(pop_gs, "pop gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
}
5648
5649
/** Opcode 0x0f 0xaa. (RSM - not implemented yet, stub.) */
FNIEMOP_STUB(iemOp_rsm);
//IEMOP_HLP_MIN_386();
5653
5654
/** Opcode 0x0f 0xab. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    /* BTS Ev,Gv - bit test and set; lockable, so the worker uses RW access. */
    IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
5662
5663
/** Opcode 0x0f 0xac. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    /* SHRD Ev,Gv,Ib - shift-right-double with immediate count. */
    IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
5671
5672
/** Opcode 0x0f 0xad. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    /* SHRD Ev,Gv,CL - shift-right-double with count in CL. */
    IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
5680
5681
/** Opcode 0x0f 0xae mem/0. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    /* FXSAVE m512 - raises #UD unless the guest CPU advertises FXSR;
       actual state saving is deferred to the C implementation. */
    IEMOP_MNEMONIC(fxsave, "fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5700
5701
/** Opcode 0x0f 0xae mem/1. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    /* FXRSTOR m512 - raises #UD unless the guest CPU advertises FXSR;
       actual state restoring is deferred to the C implementation. */
    IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5720
5721
/** Opcode 0x0f 0xae mem/2. (LDMXCSR - stub, not implemented yet.) */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3. (STMXCSR - stub, not implemented yet.) */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4. (XSAVE - raises \#UD, not implemented.) */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5. (XRSTOR - raises \#UD, not implemented.) */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6. (XSAVEOPT - raises \#UD, not implemented.) */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7. (CLFLUSH - stub, not implemented yet.) */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5739
5740
/** Opcode 0x0f 0xae 11b/5. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    /* LFENCE - requires SSE2 on the guest.  On hosts without SSE2 a generic
       alternative memory fence is used instead of the real instruction. */
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(lfence, "lfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5759
5760
/** Opcode 0x0f 0xae 11b/6. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    /* MFENCE - requires SSE2 on the guest.  On hosts without SSE2 a generic
       alternative memory fence is used instead of the real instruction. */
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(mfence, "mfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5779
5780
/** Opcode 0x0f 0xae 11b/7. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    /* SFENCE.  NOTE(review): architecturally SFENCE was introduced with SSE
       (CPUID.SSE), not SSE2 - gating the guest check on fSse2 below may be
       stricter than real hardware; verify against the CPUID feature tables. */
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(sfence, "sfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5799
5800
/** Opcode 0xf3 0x0f 0xae 11b/0. (RDFSBASE - raises \#UD, not implemented.) */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1. (RDGSBASE - raises \#UD, not implemented.) */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2. (WRFSBASE - raises \#UD, not implemented.) */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3. (WRGSBASE - raises \#UD, not implemented.) */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5812
5813
/** Opcode 0x0f 0xae. */
FNIEMOP_DEF(iemOp_Grp15)
{
    /*
     * Group 15 dispatcher.  Memory forms (mod != 3) are selected by the reg
     * field alone; register forms additionally dispatch on the rep/opsize/lock
     * prefixes (no prefix -> fences, F3 -> fs/gs base ops, rest -> #UD).
     */
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* memory forms - prefix handling is left to the individual workers. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* register forms - dispatch on the prefix combination. */
        switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0: /* no prefixes: only the fence encodings (reg 5-7) are valid. */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every case above returns */

            case IEM_OP_PRF_REPZ: /* F3 prefix: rd/wr fs/gs base (reg 0-3). */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every case above returns */

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}
5873
5874
/** Opcode 0x0f 0xaf. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    /* IMUL Gv,Ev (two-operand form) - SF/ZF/AF/PF are architecturally undefined. */
    IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
5883
5884
/** Opcode 0x0f 0xb0. */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    /*
     * CMPXCHG Eb,Gb: compares AL with the destination; on match stores Gb
     * into the destination, otherwise the AL/destination exchange is handled
     * inside the assembly implementation (via the pu8Al reference).
     */
    IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination - AL is referenced directly, no writeback needed. */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG(uint8_t *,       pu8Al,                  1);
        IEM_MC_ARG(uint8_t,         u8Src,                  2);
        IEM_MC_ARG(uint32_t *,      pEFlags,                3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory destination - AL is copied into a local so it can be mapped
           alongside the memory operand, then unconditionally written back
           (storing the unchanged value on success is equivalent). */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG(uint8_t *,       pu8Al,                  1);
        IEM_MC_ARG(uint8_t,         u8Src,                  2);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,       u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5943
/** Opcode 0x0f 0xb1 - CMPXCHG Ev,Gv (compare xAX with r/m, exchange on match). */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486(); /* CMPXCHG first appeared on the 486 per the MIN_486 gate. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* mod=3: register destination; dispatch on effective operand size. */
        IEMOP_HLP_DONE_DECODING();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                /* Locked worker when a LOCK prefix was decoded. */
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* 32-bit GPR writes zero the upper halves in long mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                /* On 32-bit hosts the 64-bit source is passed by reference. */
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: map RW, use a local accumulator copy, then
           commit memory, EFLAGS and the accumulator in that order. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                /* Effective address first, then declare decoding complete. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                /* On 32-bit hosts the 64-bit source is passed by reference. */
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6137
6138
/**
 * Common worker for the far-pointer loads (used by lss/lfs/lgs below):
 * fetches offset:selector from memory and hands both to
 * iemCImpl_load_SReg_Greg together with the target segment and general
 * registers.  Callers must have rejected the register form (mod=3) already.
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Offset word first, then the selector at displacement +2. */
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* 32-bit offset, selector at displacement +4. */
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            /* Selector follows the 64-bit offset at displacement +8. */
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6200
6201
6202/** Opcode 0x0f 0xb2. */
6203FNIEMOP_DEF(iemOp_lss_Gv_Mp)
6204{
6205 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
6206 IEMOP_HLP_MIN_386();
6207 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6208 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6209 return IEMOP_RAISE_INVALID_OPCODE();
6210 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
6211}
6212
6213
/** Opcode 0x0f 0xb3 - BTR Ev,Gv (bit test and reset). */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
    IEMOP_HLP_MIN_386();
    /* Decoding/dispatch shared with the other Ev,Gv bit-test operators. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}
6221
6222
6223/** Opcode 0x0f 0xb4. */
6224FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
6225{
6226 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
6227 IEMOP_HLP_MIN_386();
6228 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6229 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6230 return IEMOP_RAISE_INVALID_OPCODE();
6231 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
6232}
6233
6234
6235/** Opcode 0x0f 0xb5. */
6236FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
6237{
6238 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
6239 IEMOP_HLP_MIN_386();
6240 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6241 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6242 return IEMOP_RAISE_INVALID_OPCODE();
6243 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
6244}
6245
6246
/** Opcode 0x0f 0xb6 - MOVZX Gv,Eb (zero-extend byte into 16/32/64-bit reg). */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                /* Zero-extending fetch from the byte register selected by r/m. */
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* Zero-extending byte fetch from guest memory. */
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6340
6341
/** Opcode 0x0f 0xb7 - MOVZX Gv,Ew (zero-extend word into 32/64-bit reg). */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Only two result widths: 16/32-bit op sizes both store a 32-bit result. */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6410
6411
/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF); UD stub, decodes to \#UD. */
FNIEMOP_UD_STUB(iemOp_jmpe);
/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev (not implemented yet, plain stub). */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6416
6417
/** Opcode 0x0f 0xb9 - Group 10; always raises an invalid-opcode exception. */
FNIEMOP_DEF(iemOp_Grp10)
{
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
6424
6425
/** Opcode 0x0f 0xba - Group 8: bt/bts/btr/btc Ev,Ib selected by the reg field. */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        /* /0 - /3 are not assigned in group 8. */
        case 0: case 1: case 2: case 3:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 4: pImpl = &g_iemAImpl_bt;  IEMOP_MNEMONIC(bt_Ev_Ib,  "bt  Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        /* The bit index immediate is masked to the operand width below. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u8Bit & 0x0f, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u8Bit & 0x1f, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit GPR writes zero the upper half in long mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u8Bit & 0x3f, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT has no locked worker (it only reads), so map read-only for it. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,              pu16Dst,                0);
                IEM_MC_ARG(uint16_t,                u16Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                /* cbImm=1: one immediate byte (the bit index) follows ModR/M. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,              pu32Dst,                0);
                IEM_MC_ARG(uint32_t,                u32Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,              pu64Dst,                0);
                IEM_MC_ARG(uint64_t,                u64Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

}
6597
6598
/** Opcode 0x0f 0xbb - BTC Ev,Gv (bit test and complement). */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
    IEMOP_HLP_MIN_386();
    /* Decoding/dispatch shared with the other Ev,Gv bit-test operators. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
6606
6607
/** Opcode 0x0f 0xbc - BSF Gv,Ev (bit scan forward). */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    /* BSF leaves OF/SF/AF/PF/CF undefined; exclude them from verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}
6616
6617
/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev (not implemented yet, plain stub). */
FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
6620
6621
/** Opcode 0x0f 0xbd - BSR Gv,Ev (bit scan reverse). */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    /* BSR leaves OF/SF/AF/PF/CF undefined; exclude them from verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
6630
6631
/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev (not implemented yet, plain stub). */
FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
6634
6635
/** Opcode 0x0f 0xbe - MOVSX Gv,Eb (sign-extend byte into 16/32/64-bit reg). */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                /* Sign-extending fetch from the byte register selected by r/m. */
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* Sign-extending byte fetch from guest memory. */
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6729
6730
/** Opcode 0x0f 0xbf - MOVSX Gv,Ew (sign-extend word into 32/64-bit reg). */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Only two result widths: 16/32-bit op sizes both store a 32-bit result. */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6799
6800
/** Opcode 0x0f 0xc0 - xadd Eb,Gb.
 * Exchanges the byte destination with the byte register source and stores
 * their sum in the destination; 486+ instruction. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();
    IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* LOCK is only valid with a memory destination. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t, u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        /* Work on a copy of the source register; the updated value is written
           back to the register only after the memory commit succeeds. */
        IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}
6859
6860
/** Opcode 0x0f 0xc1 - xadd Ev,Gv.
 * Word/dword/qword variant of XADD, dispatched on the effective operand
 * size; 486+ instruction. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* LOCK is only valid with a memory destination. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit register writes zero the upper halves in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                /* Work on a register copy; write it back only after the memory commit. */
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7013
7014
/* 0x0f 0xc2 cmpps/cmppd/cmpss/cmpsd family - not yet implemented, stubbed. */
/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
7023
7024
/** Opcode 0x0f 0xc3 - movnti My,Gy.
 * Non-temporal store of a 32/64-bit general register to memory; requires
 * SSE2 and only the memory-destination form is valid. */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* #UD if the guest CPU profile lacks SSE2. */
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* #UD if the guest CPU profile lacks SSE2. */
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form. */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
/* 0xc3 prefixed forms are invalid; 0xc4-0xc6 (pinsrw/pextrw/shufps/shufpd)
   are stubbed pending implementation. */
/* Opcode 0x66 0x0f 0xc3 - invalid */
/* Opcode 0xf3 0x0f 0xc3 - invalid */
/* Opcode 0xf2 0x0f 0xc3 - invalid */

/** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */
FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
/* Opcode 0xf3 0x0f 0xc4 - invalid */
/* Opcode 0xf2 0x0f 0xc4 - invalid */

/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
/* Opcode 0xf3 0x0f 0xc5 - invalid */
/* Opcode 0xf2 0x0f 0xc5 - invalid */

/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
/* Opcode 0xf3 0x0f 0xc6 - invalid */
/* Opcode 0xf2 0x0f 0xc6 - invalid */
7102
7103
/** Opcode 0x0f 0xc7 !11/1 - cmpxchg8b Mq.
 * Compares EDX:EAX with the 64-bit memory operand; on match stores ECX:EBX
 * there and sets ZF, otherwise loads the operand into EDX:EAX. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Gather the comparand (EDX:EAX) and replacement (ECX:EBX) pairs. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* On mismatch (ZF clear) the loaded value is written back to EAX/EDX. */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7148
7149
/** Opcode REX.W 0x0f 0xc7 !11/1 - cmpxchg16b Mdq.
 * 128-bit variant of cmpxchg8b using RDX:RAX / RCX:RBX; #UD when the guest
 * CPU profile lacks CMPXCHG16B, #GP(0) when the operand isn't 16-byte
 * aligned. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
    {
#if 0
        RT_NOREF(bRm);
        IEMOP_BITCH_ABOUT_STUB();
        return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#else
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
        IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
        IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
        IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
        IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

        /* Gather the comparand (RDX:RAX) and replacement (RCX:RBX) pairs. */
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
        IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);

        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
        IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);

        IEM_MC_FETCH_EFLAGS(EFlags);
# ifdef RT_ARCH_AMD64
        if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
        {
            /* Host supports CMPXCHG16B natively - use the atomic assembly helper. */
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        }
        else
# endif
        {
            /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
                     accesses and not all all atomic, which works fine on in UNI CPU guest
                     configuration (ignoring DMA). If guest SMP is active we have no choice
                     but to use a rendezvous callback here. Sigh. */
            if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
            {
                IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
                /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
            }
        }

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        /* On mismatch (ZF clear) the loaded value is written back to RAX/RDX. */
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
            IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
        IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();

        IEM_MC_END();
        return VINF_SUCCESS;
#endif
    }
    Log(("cmpxchg16b -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
7223
7224
/* Remaining group 9 members (rdrand, VMX instructions) - stubbed as #UD. */
/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7239
7240
/** Opcode 0x0f 0xc7 - group 9 dispatcher.
 * Routes on the ModR/M reg field: /1 = cmpxchg8b/16b, /6 = rdrand or the
 * vmptrld/vmclear/vmxon family (prefix-selected), /7 = vmptrst. */
FNIEMOP_DEF(iemOp_Grp9)
{
    /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 2: case 3: case 4: case 5:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 1:
            /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
            /* Register form and 0x66/0xf3 prefixes are #UD for cmpxchg8b/16b. */
            if (   (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
                || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
                return IEMOP_RAISE_INVALID_OPCODE();
            /* REX.W selects the 16-byte variant. */
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
            return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
        case 6:
            /* Register form is rdrand; memory form selects a VMX instruction by prefix. */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
            {
                case 0:
                    return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
                case IEM_OP_PRF_SIZE_OP:
                    return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
                case IEM_OP_PRF_REPZ:
                    return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
                default:
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 7:
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
            {
                case 0:
                case IEM_OP_PRF_REPZ:
                    return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
                default:
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7284
7285
/**
 * Common 'bswap register' helper.
 *
 * Byte-swaps the general register given by @a iReg according to the
 * effective operand size.  The 16-bit case uses the dedicated bswap_u16
 * helper (and deliberately leaves the high dword alone); the 32-bit case
 * clears the upper half first as 32-bit writes do in 64-bit mode.
 */
FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7325
7326
/** Opcode 0x0f 0xc8 - bswap rAX/r8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
7337
7338
/** Opcode 0x0f 0xc9 - bswap rCX/r9 (REX.B selects r9). */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
7346
7347
7348/** Opcode 0x0f 0xca. */
7349FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7350{
7351 IEMOP_MNEMONIC(bswap_rDX_r9, "bswap rDX/r9");
7352 IEMOP_HLP_MIN_486();
7353 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7354}
7355
7356
7357/** Opcode 0x0f 0xcb. */
7358FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7359{
7360 IEMOP_MNEMONIC(bswap_rBX_r9, "bswap rBX/r9");
7361 IEMOP_HLP_MIN_486();
7362 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7363}
7364
7365
/** Opcode 0x0f 0xcc - bswap rSP/r12 (REX.B selects r12). */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
7373
7374
/** Opcode 0x0f 0xcd - bswap rBP/r13 (REX.B selects r13). */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
7382
7383
/** Opcode 0x0f 0xce - bswap rSI/r14 (REX.B selects r14). */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
7391
7392
/** Opcode 0x0f 0xcf - bswap rDI/r15 (REX.B selects r15). */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
7400
7401
/* 0x0f 0xd0-0xd6 MMX/SSE opcodes - stubbed pending implementation. */
/* Opcode 0x0f 0xd0 - invalid */
/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0xd0 - invalid */
/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);

/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xd1 - invalid */
/* Opcode 0xf2 0x0f 0xd1 - invalid */

/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xd2 - invalid */
/* Opcode 0xf2 0x0f 0xd2 - invalid */

/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xd3 - invalid */
/* Opcode 0xf2 0x0f 0xd3 - invalid */

/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xd4 - invalid */
/* Opcode 0xf2 0x0f 0xd4 - invalid */

/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xd5 - invalid */
/* Opcode 0xf2 0x0f 0xd5 - invalid */

/* Opcode 0x0f 0xd6 - invalid */
/** Opcode 0x66 0x0f 0xd6 - vmovq Wq, Vq */
FNIEMOP_STUB(iemOp_vmovq_Wq_Vq);
/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
#if 0
/* Disabled draft for the 0x0f 0xd6 family; kept for reference.
   Fixed: the MMX-case macro name was garbled with embedded spaces
   ("I E M O P _ M N E M O N I C"), which would not compile if this
   block were ever enabled. */
FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
{
    /* Docs says register only. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
            IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pDst, 0);
            IEM_MC_ARG(uint128_t const *, pSrc, 1);
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();
            IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case 0: /* MMX */
            /** @todo NOTE(review): body looks copy-pasted from pmovmskb; revisit before enabling. */
            IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
            IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pDst, 0);
            IEM_MC_ARG(uint64_t const *, pSrc, 1);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
            IEM_MC_PREPARE_FPU_USAGE();
            IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
            IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
            IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
#endif
7494
7495
7496/** Opcode 0x0f 0xd7. */
7497FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
7498{
7499 /* Docs says register only. */
7500 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7501 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7502 return IEMOP_RAISE_INVALID_OPCODE();
7503
7504 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
7505 /** @todo testcase: Check that the instruction implicitly clears the high
7506 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
7507 * and opcode modifications are made to work with the whole width (not
7508 * just 128). */
7509 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7510 {
7511 case IEM_OP_PRF_SIZE_OP: /* SSE */
7512 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7513 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7514 IEM_MC_BEGIN(2, 0);
7515 IEM_MC_ARG(uint64_t *, pDst, 0);
7516 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7517 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7518 IEM_MC_PREPARE_SSE_USAGE();
7519 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7520 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7521 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7522 IEM_MC_ADVANCE_RIP();
7523 IEM_MC_END();
7524 return VINF_SUCCESS;
7525
7526 case 0: /* MMX */
7527 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7528 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7529 IEM_MC_BEGIN(2, 0);
7530 IEM_MC_ARG(uint64_t *, pDst, 0);
7531 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7532 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7533 IEM_MC_PREPARE_FPU_USAGE();
7534 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7535 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7536 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7537 IEM_MC_ADVANCE_RIP();
7538 IEM_MC_END();
7539 return VINF_SUCCESS;
7540
7541 default:
7542 return IEMOP_RAISE_INVALID_OPCODE();
7543 }
7544}
7545
7546
/* 0x0f 0xd8-0xe6 MMX/SSE opcodes - stubbed pending implementation. */
/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xd8 - invalid */
/* Opcode 0xf2 0x0f 0xd8 - invalid */

/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xd9 - invalid */
/* Opcode 0xf2 0x0f 0xd9 - invalid */

/** Opcode 0x0f 0xda - pminub Pq, Qq */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xda - invalid */
/* Opcode 0xf2 0x0f 0xda - invalid */

/** Opcode 0x0f 0xdb - pand Pq, Qq */
FNIEMOP_STUB(iemOp_pand_Pq_Qq);
/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xdb - invalid */
/* Opcode 0xf2 0x0f 0xdb - invalid */

/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xdc - invalid */
/* Opcode 0xf2 0x0f 0xdc - invalid */

/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xdd - invalid */
/* Opcode 0xf2 0x0f 0xdd - invalid */

/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xde - invalid */
/* Opcode 0xf2 0x0f 0xde - invalid */

/** Opcode 0x0f 0xdf - pandn Pq, Qq */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xdf - invalid */
/* Opcode 0xf2 0x0f 0xdf - invalid */

/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xe0 - invalid */
/* Opcode 0xf2 0x0f 0xe0 - invalid */

/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xe1 - invalid */
/* Opcode 0xf2 0x0f 0xe1 - invalid */

/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xe2 - invalid */
/* Opcode 0xf2 0x0f 0xe2 - invalid */

/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xe3 - invalid */
/* Opcode 0xf2 0x0f 0xe3 - invalid */

/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xe4 - invalid */
/* Opcode 0xf2 0x0f 0xe4 - invalid */

/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xe5 - invalid */
/* Opcode 0xf2 0x0f 0xe5 - invalid */

/* Opcode 0x0f 0xe6 - invalid */
/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
7652
7653
7654/** Opcode 0x0f 0xe7. */
7655FNIEMOP_DEF(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq)
7656{
7657 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7658 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7659 {
7660 /*
7661 * Register, memory.
7662 */
7663/** @todo check when the REPNZ/Z bits kick in. Same as lock, probably... */
7664 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7665 {
7666
7667 case IEM_OP_PRF_SIZE_OP: /* SSE */
7668 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7669 IEM_MC_BEGIN(0, 2);
7670 IEM_MC_LOCAL(uint128_t, uSrc);
7671 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7672
7673 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7675 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7676 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7677
7678 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7679 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7680
7681 IEM_MC_ADVANCE_RIP();
7682 IEM_MC_END();
7683 break;
7684
7685 case 0: /* MMX */
7686 IEMOP_MNEMONIC(movntdq_Mdq_Vdq, "movntdq Mdq,Vdq");
7687 IEM_MC_BEGIN(0, 2);
7688 IEM_MC_LOCAL(uint64_t, uSrc);
7689 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7690
7691 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7692 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7693 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7694 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7695
7696 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7697 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7698
7699 IEM_MC_ADVANCE_RIP();
7700 IEM_MC_END();
7701 break;
7702
7703 default:
7704 return IEMOP_RAISE_INVALID_OPCODE();
7705 }
7706 }
7707 /* The register, register encoding is invalid. */
7708 else
7709 return IEMOP_RAISE_INVALID_OPCODE();
7710 return VINF_SUCCESS;
7711}
7712
7713
/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, Wx (stub identifier is truncated to _W). */
FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xe8 - invalid */
/* Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xe9 - invalid */
/* Opcode 0xf2 0x0f 0xe9 - invalid */

/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xea - invalid */
/* Opcode 0xf2 0x0f 0xea - invalid */

/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_STUB(iemOp_por_Pq_Qq);
/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, Wx (stub identifier is truncated to _W). */
FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xeb - invalid */
/* Opcode 0xf2 0x0f 0xeb - invalid */

/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */

/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, Wx (stub identifier is truncated to _W). */
FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */
7762
7763
/** Opcode 0x0f 0xef - pxor Pq,Qq (MMX); also 0x66 0x0f 0xef - pxor Vdq,Wdq (SSE2). */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
{
    IEMOP_MNEMONIC(pxor, "pxor");
    /* Common MMX/SSE2 worker picks the 64-bit or 128-bit form from the prefixes. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
7770/* Opcode 0xf3 0x0f 0xef - invalid */
7771/* Opcode 0xf2 0x0f 0xef - invalid */
7772
/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */
/* Opcode 0xf3 0x0f 0xf0 - invalid */
/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);

/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, Wx (stub identifier is truncated to _W). */
FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xf1 - invalid */
/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
/** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xf2 - invalid */
/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xf3 - invalid */
/* Opcode 0xf2 0x0f 0xf3 - invalid */

/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, Wx (stub identifier is truncated to _W). */
FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xf4 - invalid */
/* Opcode 0xf2 0x0f 0xf4 - invalid */

/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
/** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xf5 - invalid */
/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xf6 - invalid */
/* Opcode 0xf2 0x0f 0xf6 - invalid */

/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
/* Opcode 0xf3 0x0f 0xf7 - invalid */
/* Opcode 0xf2 0x0f 0xf7 - invalid */

/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
/** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, Wx (stub identifier is truncated to _W). */
FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xf8 - invalid */
/* Opcode 0xf2 0x0f 0xf8 - invalid */

/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xf9 - invalid */
/* Opcode 0xf2 0x0f 0xf9 - invalid */

/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xfa - invalid */
/* Opcode 0xf2 0x0f 0xfa - invalid */

/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
/** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, Wx (stub identifier is truncated to _W). */
FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xfb - invalid */
/* Opcode 0xf2 0x0f 0xfb - invalid */

/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
/** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xfc - invalid */
/* Opcode 0xf2 0x0f 0xfc - invalid */

/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
/** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xfd - invalid */
/* Opcode 0xf2 0x0f 0xfd - invalid */

/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, Wx (stub identifier is truncated to _W). */
FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
/* Opcode 0xf2 0x0f 0xfe - invalid */
7861
7862
/** Opcode **** 0x0f 0xff - UD0 (deliberately undefined opcode). */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        /* On Intel CPUs a ModR/M byte (and any effective-address bytes it
           implies) is fetched and decoded before \#UD is raised; the value
           itself is unused.  Non-Intel vendors skip straight to \#UD. */
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        /* Decode the effective address so the instruction length is right. */
        RTGCPTR GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
7880
7881
7882
/** Repeats a_fn four times. For decoding tables. */
#define IEMOP_X4(a_fn) a_fn, a_fn, a_fn, a_fn

/**
 * Two byte opcode map (0x0f escaped).
 *
 * Four entries per opcode byte, selected by pVCpu->iem.s.idxPrefix:
 * no prefix, 0x66 prefix, 0xf3 prefix, 0xf2 prefix.
 */
IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
{
    /*          no prefix,                  066h prefix                 f3h prefix,                 f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
    /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
    /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
    /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq, iemOp_movlps_Mq_Vq__movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd, iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd, iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
    /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
    /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
    /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
    /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
    /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
    /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
    /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,

    /* 0x60 */ IEMOP_X4(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq),
    /* 0x61 */ IEMOP_X4(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq),
    /* 0x62 */ IEMOP_X4(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq),
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ IEMOP_X4(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq),
    /* 0x69 */ IEMOP_X4(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq),
    /* 0x6a */ IEMOP_X4(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq),
    /* 0x6b */ IEMOP_X4(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq),
    /* 0x6c */ IEMOP_X4(iemOp_punpcklqdq_Vdq_Wdq),
    /* 0x6d */ IEMOP_X4(iemOp_punpckhqdq_Vdq_Wdq),
    /* 0x6e */ IEMOP_X4(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey),
    /* 0x6f */ IEMOP_X4(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq),

    /* 0x70 */ IEMOP_X4(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib),
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq, iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq, iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq, iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
    /* 0x7e */ IEMOP_X4(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq),
    /* 0x7f */ IEMOP_X4(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq),

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ IEMOP_X4(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq),
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq, iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq, iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
/* 256 opcodes x 4 prefix columns. */
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
/** @} */
8163/** @} */
8164
8165
8166/** @name One byte opcodes.
8167 *
8168 * @{
8169 */
8170
/** Opcode 0x00 - add Eb,Gb. */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC(add_Eb_Gb, "add Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}


/** Opcode 0x01 - add Ev,Gv. */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC(add_Ev_Gv, "add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}


/** Opcode 0x02 - add Gb,Eb. */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC(add_Gb_Eb, "add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}


/** Opcode 0x03 - add Gv,Ev. */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC(add_Gv_Ev, "add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}


/** Opcode 0x04 - add al,Ib. */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC(add_al_Ib, "add al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}


/** Opcode 0x05 - add rAX,Iz. */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC(add_rAX_Iz, "add rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}


/** Opcode 0x06 - push es. */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC(push_es, "push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}


/** Opcode 0x07 - pop es; invalid (\#UD) in 64-bit mode. */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC(pop_es, "pop es");
    /* NOTE(review): the 64-bit check precedes the done-decoding call here,
       whereas iemOp_pop_SS/iemOp_pop_DS do it the other way around - confirm
       which ordering is intended. */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
8235
8236
/** Opcode 0x08 - or Eb,Gb. */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC(or_Eb_Gb, "or Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after OR. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}


/** Opcode 0x09 - or Ev,Gv. */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC(or_Ev_Gv, "or Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}


/** Opcode 0x0a - or Gb,Eb. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC(or_Gb_Eb, "or Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/** Opcode 0x0b - or Gv,Ev. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC(or_Gv_Ev, "or Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/** Opcode 0x0c - or al,Ib. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC(or_al_Ib, "or al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/** Opcode 0x0d - or rAX,Iz. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC(or_rAX_Iz, "or rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}


/** Opcode 0x0e - push cs. */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC(push_cs, "push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
8297
8298
/** Opcode 0x0f - escape byte introducing the two-byte opcode map. */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#ifdef VBOX_STRICT
    /* One-time strict-build sanity check that the table is laid out as four
       entries per opcode (no prefix, 066h, 0f3h, 0f2h), probed at 0x0f 0xbc. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);

    /** @todo PUSH CS on 8086, undefined on 80186. */
    IEMOP_HLP_MIN_286();
    /* Dispatch on the second opcode byte; idxPrefix selects the prefix column. */
    return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
}
8321
/** Opcode 0x10 - adc Eb,Gb. */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC(adc_Eb_Gb, "adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/** Opcode 0x11 - adc Ev,Gv. */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC(adc_Ev_Gv, "adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/** Opcode 0x12 - adc Gb,Eb. */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC(adc_Gb_Eb, "adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/** Opcode 0x13 - adc Gv,Ev. */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC(adc_Gv_Ev, "adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/** Opcode 0x14 - adc al,Ib. */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC(adc_al_Ib, "adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/** Opcode 0x15 - adc rAX,Iz. */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC(adc_rAX_Iz, "adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}


/** Opcode 0x16 - push ss. */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC(push_ss, "push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}


/** Opcode 0x17 - pop ss; invalid (\#UD) in 64-bit mode. */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC(pop_ss, "pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
8386
8387
/** Opcode 0x18 - SBB Eb,Gb: subtract-with-borrow, byte-sized, reg/mem destination. */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC(sbb_Eb_Gb, "sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}
8394
8395
/** Opcode 0x19 - SBB Ev,Gv: subtract-with-borrow, word/dword/qword, reg/mem destination. */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC(sbb_Ev_Gv, "sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}
8402
8403
/** Opcode 0x1a - SBB Gb,Eb: subtract-with-borrow, byte-sized, register destination. */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC(sbb_Gb_Eb, "sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}
8410
8411
/** Opcode 0x1b - SBB Gv,Ev: subtract-with-borrow, word/dword/qword, register destination. */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC(sbb_Gv_Ev, "sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}
8418
8419
/** Opcode 0x1c - SBB AL,Ib: subtract-with-borrow an immediate byte from AL. */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC(sbb_al_Ib, "sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}
8426
8427
/** Opcode 0x1d - SBB rAX,Iz: subtract-with-borrow an immediate (operand-size) from rAX. */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC(sbb_rAX_Iz, "sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
8434
8435
/** Opcode 0x1e - PUSH DS; handled by the common segment-register push worker. */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC(push_ds, "push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
8442
8443
/** Opcode 0x1f - POP DS.  Invalid in 64-bit mode; no LOCK prefix allowed. */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC(pop_ds, "pop ds");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
8452
8453
/** Opcode 0x20 - AND Eb,Gb: bitwise AND, byte-sized, reg/mem destination.
 *  AF is marked undefined for the verification mode (per architecture). */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC(and_Eb_Gb, "and Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}
8461
8462
/** Opcode 0x21 - AND Ev,Gv: bitwise AND, word/dword/qword, reg/mem destination (AF undefined). */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC(and_Ev_Gv, "and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}
8470
8471
/** Opcode 0x22 - AND Gb,Eb: bitwise AND, byte-sized, register destination (AF undefined). */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC(and_Gb_Eb, "and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}
8479
8480
/** Opcode 0x23 - AND Gv,Ev: bitwise AND, word/dword/qword, register destination (AF undefined). */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC(and_Gv_Ev, "and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}
8488
8489
/** Opcode 0x24 - AND AL,Ib: bitwise AND of an immediate byte into AL (AF undefined). */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC(and_al_Ib, "and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}
8497
8498
/** Opcode 0x25 - AND rAX,Iz: bitwise AND of an immediate (operand-size) into rAX (AF undefined). */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC(and_rAX_Iz, "and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
8506
8507
/** Opcode 0x26 - ES segment-override prefix.
 *  Records the prefix flag and effective segment, then fetches and dispatches
 *  the next opcode byte through the one-byte opcode map. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8518
8519
/** Opcode 0x27 - DAA: decimal adjust AL after addition.
 *  Invalid in 64-bit mode; OF is marked undefined for verification.
 *  The actual work is deferred to the C implementation. */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC(daa_AL, "daa AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
8529
8530
/** Opcode 0x28 - SUB Eb,Gb: subtract, byte-sized, reg/mem destination. */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC(sub_Eb_Gb, "sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}
8537
8538
/** Opcode 0x29 - SUB Ev,Gv: subtract, word/dword/qword, reg/mem destination. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC(sub_Ev_Gv, "sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}
8545
8546
/** Opcode 0x2a - SUB Gb,Eb: subtract, byte-sized, register destination. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC(sub_Gb_Eb, "sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}
8553
8554
/** Opcode 0x2b - SUB Gv,Ev: subtract, word/dword/qword, register destination. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC(sub_Gv_Ev, "sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}
8561
8562
/** Opcode 0x2c - SUB AL,Ib: subtract an immediate byte from AL. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC(sub_al_Ib, "sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}
8569
8570
/** Opcode 0x2d - SUB rAX,Iz: subtract an immediate (operand-size) from rAX. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC(sub_rAX_Iz, "sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
8577
8578
/** Opcode 0x2e - CS segment-override prefix.
 *  Records the prefix flag and effective segment, then dispatches the next
 *  opcode byte. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8589
8590
/** Opcode 0x2f - DAS: decimal adjust AL after subtraction.
 *  Invalid in 64-bit mode; OF marked undefined; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC(das_AL, "das AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
8600
8601
/** Opcode 0x30 - XOR Eb,Gb: bitwise XOR, byte-sized, reg/mem destination (AF undefined). */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC(xor_Eb_Gb, "xor Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}
8609
8610
/** Opcode 0x31 - XOR Ev,Gv: bitwise XOR, word/dword/qword, reg/mem destination (AF undefined). */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC(xor_Ev_Gv, "xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}
8618
8619
/** Opcode 0x32 - XOR Gb,Eb: bitwise XOR, byte-sized, register destination (AF undefined). */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC(xor_Gb_Eb, "xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}
8627
8628
/** Opcode 0x33 - XOR Gv,Ev: bitwise XOR, word/dword/qword, register destination (AF undefined). */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC(xor_Gv_Ev, "xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}
8636
8637
/** Opcode 0x34 - XOR AL,Ib: bitwise XOR of an immediate byte into AL (AF undefined). */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC(xor_al_Ib, "xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}
8645
8646
/** Opcode 0x35 - XOR rAX,Iz: bitwise XOR of an immediate (operand-size) into rAX (AF undefined). */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC(xor_rAX_Iz, "xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}
8654
8655
/** Opcode 0x36 - SS segment-override prefix.
 *  Records the prefix flag and effective segment, then dispatches the next
 *  opcode byte. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8666
8667
/** Opcode 0x37 - AAA.  Not implemented yet; generated as a decoder stub. */
FNIEMOP_STUB(iemOp_aaa);
8670
8671
/** Opcode 0x38 - CMP Eb,Gb: compare (flags only), byte-sized, reg/mem first operand. */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}
8678
8679
/** Opcode 0x39 - CMP Ev,Gv: compare (flags only), word/dword/qword, reg/mem first operand. */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}
8686
8687
/** Opcode 0x3a - CMP Gb,Eb: compare (flags only), byte-sized, register first operand. */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}
8694
8695
/** Opcode 0x3b - CMP Gv,Ev: compare (flags only), word/dword/qword, register first operand. */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}
8702
8703
/** Opcode 0x3c - CMP AL,Ib: compare AL with an immediate byte (flags only). */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}
8710
8711
/** Opcode 0x3d - CMP rAX,Iz: compare rAX with an immediate (operand-size), flags only. */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
8718
8719
/** Opcode 0x3e - DS segment-override prefix.
 *  Records the prefix flag and effective segment, then dispatches the next
 *  opcode byte. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8730
8731
/** Opcode 0x3f - AAS.  Not implemented yet; generated as a decoder stub. */
FNIEMOP_STUB(iemOp_aas);
8734
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * Applies the given unary operation to the general register @a iReg using the
 * current effective operand size.  For 32-bit operands the high half of the
 * 64-bit register is explicitly cleared (IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF).
 *
 * @param   pImpl   Unary operator implementation table (16/32/64-bit workers).
 * @param   iReg    The general register index to operate on.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *,  pu16Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *,  pu64Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}
8779
8780
/** Opcode 0x40 - INC eAX, or the plain REX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}
8799
8800
/** Opcode 0x41 - INC eCX, or the REX.B prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}
8820
8821
/** Opcode 0x42 - INC eDX, or the REX.X prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}
8841
8842
8843
/** Opcode 0x43 - INC eBX, or the REX.BX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}
8864
8865
/** Opcode 0x44 - INC eSP, or the REX.R prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}
8885
8886
/** Opcode 0x45 - INC eBP, or the REX.RB prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}
8907
8908
/** Opcode 0x46 - INC eSI, or the REX.RX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}
8929
8930
/** Opcode 0x47 - INC eDI, or the REX.RBX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
8952
8953
/** Opcode 0x48 - DEC eAX, or the REX.W prefix in 64-bit mode
 *  (recalculates the effective operand size). */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}
8973
8974
/** Opcode 0x49 - DEC eCX, or the REX.BW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}
8995
8996
/** Opcode 0x4a - DEC eDX, or the REX.XW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}
9017
9018
/** Opcode 0x4b - DEC eBX, or the REX.BXW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}
9040
9041
/** Opcode 0x4c - DEC eSP, or the REX.RW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}
9062
9063
/** Opcode 0x4d - DEC eBP, or the REX.RBW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB   = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}
9085
9086
/** Opcode 0x4e - DEC eSI, or the REX.RXW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}
9108
9109
/** Opcode 0x4f - DEC eDI, or the REX.RBXW prefix in 64-bit mode
 *  (all four REX payload bits set). */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
9132
9133
/**
 * Common 'push register' helper.
 *
 * In 64-bit mode the register index is extended by REX.B, the default operand
 * size becomes 64-bit, and an operand-size (66h) prefix selects 16-bit instead
 * (32-bit pushes are not encodable there).  The register value is fetched and
 * pushed at the effective operand size.
 *
 * @param   iReg    The general register index to push (pre-REX).
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
9179
9180
/** Opcode 0x50 - PUSH rAX; handled by the common general-register push worker. */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
9187
9188
/** Opcode 0x51 - PUSH rCX. */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
9195
9196
/** Opcode 0x52 - PUSH rDX. */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
9203
9204
/** Opcode 0x53 - PUSH rBX. */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
9211
9212
9213/** Opcode 0x54. */
9214FNIEMOP_DEF(iemOp_push_eSP)
9215{
9216 IEMOP_MNEMONIC(push_rSP, "push rSP");
9217 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
9218 {
9219 IEM_MC_BEGIN(0, 1);
9220 IEM_MC_LOCAL(uint16_t, u16Value);
9221 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
9222 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
9223 IEM_MC_PUSH_U16(u16Value);
9224 IEM_MC_ADVANCE_RIP();
9225 IEM_MC_END();
9226 }
9227 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
9228}
9229
9230
/** Opcode 0x55 - PUSH rBP. */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
9237
9238
/** Opcode 0x56 - PUSH rSI. */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
9245
9246
/** Opcode 0x57 - PUSH rDI. */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
9253
9254
/**
 * Common 'pop register' helper.
 *
 * In 64-bit mode the register index is extended by REX.B, the default operand
 * size becomes 64-bit, and an operand-size (66h) prefix selects 16-bit.  The
 * value is popped directly into a reference to the destination register; the
 * 32-bit case clears the high half of the 64-bit register.
 *
 * @param   iReg    The general register index to pop into (pre-REX).
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
9301
9302
/** Opcode 0x58 - POP rAX; handled by the common general-register pop worker. */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
9309
9310
/** Opcode 0x59 - POP rCX. */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
9317
9318
/** Opcode 0x5a - POP rDX. */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
9325
9326
/** Opcode 0x5b - POP rBX. */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
9333
9334
/** Opcode 0x5c - POP rSP.
 *
 * Special-cased because the destination is the stack pointer itself: the
 * value is popped into a local first and then stored to (r/e)SP, rather than
 * popping through a register reference like the common worker does.  In
 * 64-bit mode without REX.B the 64-bit default operand size is applied here;
 * with REX.B set (r12) the common worker handles it. */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
9382
9383
/** Opcode 0x5d - POP rBP. */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
9390
9391
/** Opcode 0x5e - POP rSI. */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
9398
9399
/** Opcode 0x5f - POP rDI. */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
9406
9407
/** Opcode 0x60 - PUSHA/PUSHAD.  Requires 80186+; invalid in 64-bit mode.
 *  Dispatches to the 16- or 32-bit C implementation by effective operand size. */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
9419
9420
/** Opcode 0x61 - POPA/POPAD.  Requires 80186+; invalid in 64-bit mode.
 *  Dispatches to the 16- or 32-bit C implementation by effective operand size. */
FNIEMOP_DEF(iemOp_popa)
{
    IEMOP_MNEMONIC(popa, "popa");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
}
9432
9433
/** Opcode 0x62 - BOUND (pre-AVX-512) / EVEX prefix.  Not implemented yet; decoder stub. */
FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
//      IEMOP_HLP_MIN_186();
9437
9438
/** Opcode 0x63 - ARPL Ew,Gw (non-64-bit modes).
 *
 * Requires 80286+ and protected mode (not real or V86).  The destination RPL
 * field is adjusted via the iemAImpl_arpl assembly worker; the memory form
 * maps the destination read-write and commits EFLAGS afterwards. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
        IEM_MC_ARG(uint16_t,        u16Src,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst,          0);
        IEM_MC_ARG(uint16_t,   u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
9488
9489
/** Opcode 0x63 - MOVSXD Gv,Ev (64-bit mode).
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases.
 *
 * Sign-extends a 32-bit register or memory operand into a 64-bit register.
 * Only the 64-bit operand-size path is implemented here (see the Assert). */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9531
9532
/** Opcode 0x64 - FS segment-override prefix (requires 80386+).
 *  Records the prefix flag and effective segment, then dispatches the next
 *  opcode byte. */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
9545
9546
/** Opcode 0x65 - GS segment-override prefix (requires 80386+).
 *  Records the prefix flag and effective segment, then dispatches the next
 *  opcode byte. */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
9559
9560
/** Opcode 0x66 - operand-size override prefix.  Records the prefix,
 *  recalculates the effective operand size and decodes the next byte. */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    /* Fetch the next byte and dispatch it as the (possibly prefixed) opcode. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
9578
9579
/** Opcode 0x67 - address-size override prefix.  Toggles the effective
 *  address mode relative to the default and decodes the next byte.
 *  Note: in 64-bit mode the prefix selects 32-bit addressing (no 16-bit). */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    /* Fetch the next byte and dispatch it as the (possibly prefixed) opcode. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
9598
9599
/** Opcode 0x68 - push Iz: push an immediate of effective operand size
 *  (word/dword; in 64-bit mode a sign-extended dword pushed as a qword). */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* The immediate is still 32 bits wide; sign-extend it to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9644
9645
/** Opcode 0x69 - imul Gv,Ev,Iz: three-operand signed multiply with a
 *  full-size immediate.  Gv = Ev * Iz, truncated to the operand size. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* SF/ZF/AF/PF are architecturally undefined after IMUL. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                /* The multiply is done on a local copy so source and destination
                   registers of the ModRM byte stay independent; the result is
                   written back to the Gv register afterwards. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,     0);
                IEM_MC_ARG(uint16_t,        u16Src,      1);
                IEM_MC_ARG(uint32_t *,      pEFlags,     2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                /* The trailing 2 tells the effective-address calculation that
                   2 more opcode bytes (the word immediate) follow the ModRM. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,     0);
                IEM_MC_ARG(uint32_t,        u32Src,      1);
                IEM_MC_ARG(uint32_t *,      pEFlags,     2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                /* 4 = size of the dword immediate still to be fetched. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                /* 64-bit operand size still uses a 32-bit immediate, sign-extended. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,     0);
                IEM_MC_ARG(uint64_t,        u64Src,      1);
                IEM_MC_ARG(uint32_t *,      pEFlags,     2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                /* 4 = size of the (sign-extended) dword immediate still to be fetched. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
9805
9806
/** Opcode 0x6a - push Ib: push a sign-extended byte immediate.
 *  The signed i8Imm implicitly sign-extends to the effective operand size. */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9833
9834
/** Opcode 0x6b - imul Gv,Ev,Ib: three-operand signed multiply with a
 *  sign-extended byte immediate.  Gv = Ev * Ib, truncated to operand size. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* SF/ZF/AF/PF are architecturally undefined after IMUL. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                /* The (int8_t) cast sign-extends the byte immediate. */
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                /* Multiply a local copy, then write the result to the Gv register. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,     0);
                IEM_MC_ARG(uint16_t,        u16Src,      1);
                IEM_MC_ARG(uint32_t *,      pEFlags,     2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                /* 1 = size of the byte immediate still to be fetched. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,     0);
                IEM_MC_ARG(uint32_t,        u32Src,      1);
                IEM_MC_ARG(uint32_t *,      pEFlags,     2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,     0);
                IEM_MC_ARG(uint64_t,        u64Src,      1);
                IEM_MC_ARG(uint32_t *,      pEFlags,     2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
9988
9989
/** Opcode 0x6c - ins Yb,DX (byte input from port DX to ES:[e/rDI]).
 *  Defers to a C implementation selected by REP prefix and address size.
 *  The trailing 'false' argument presumably means "I/O permission not yet
 *  checked" (fIoChecked) - TODO confirm against the iemCImpl prototypes. */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10018
10019
/** Opcode 0x6d - ins Yv,DX (word/dword input from port DX).
 *  Defers to a C implementation selected by REP prefix, operand size and
 *  address size.  A 64-bit operand size is handled as 32-bit (no 64-bit
 *  I/O port operations exist), hence the shared 64/32-bit case labels. */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru: 64-bit operand size works like 32-bit here. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru: 64-bit operand size works like 32-bit here. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10080
10081
/** Opcode 0x6e - outs DX,Yb (byte output from DS:[e/rSI] to port DX).
 *  The effective segment is passed because OUTS honours segment overrides;
 *  the trailing 'false' presumably means "I/O permission not yet checked"
 *  (fIoChecked) - TODO confirm against the iemCImpl prototypes. */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10110
10111
/** Opcode 0x6f - outs DX,Yv (word/dword output to port DX).
 *  Defers to a C implementation selected by REP prefix, operand size and
 *  address size.  A 64-bit operand size is handled as 32-bit (no 64-bit
 *  I/O port operations exist), hence the shared 64/32-bit case labels. */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru: 64-bit operand size works like 32-bit here. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru: 64-bit operand size works like 32-bit here. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10172
10173
/** Opcode 0x70 - jo Jb: jump short if overflow (OF=1). */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10191
10192
/** Opcode 0x71 - jno Jb: jump short if not overflow (OF=0).
 *  Inverse of 0x70; the taken/not-taken arms are simply swapped. */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10210
/** Opcode 0x72 - jc/jb/jnae Jb: jump short if carry (CF=1). */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10228
10229
/** Opcode 0x73 - jnc/jnb/jae Jb: jump short if not carry (CF=0).
 *  Inverse of 0x72; the taken/not-taken arms are simply swapped. */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10247
10248
/** Opcode 0x74 - je/jz Jb: jump short if equal/zero (ZF=1). */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10266
10267
/** Opcode 0x75 - jne/jnz Jb: jump short if not equal/not zero (ZF=0).
 *  Inverse of 0x74; the taken/not-taken arms are simply swapped. */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10285
10286
/** Opcode 0x76 - jbe/jna Jb: jump short if below or equal (CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10304
10305
/** Opcode 0x77 - ja/jnbe Jb: jump short if above (CF=0 and ZF=0).
 *  Inverse of 0x76; the taken/not-taken arms are simply swapped. */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10323
10324
/** Opcode 0x78 - js Jb: jump short if sign (SF=1). */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10342
10343
/** Opcode 0x79 - jns Jb: jump short if not sign (SF=0).
 *  Inverse of 0x78; the taken/not-taken arms are simply swapped. */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10361
10362
/** Opcode 0x7a - jp/jpe Jb: jump short if parity even (PF=1). */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10380
10381
/** Opcode 0x7b - jnp/jpo Jb: jump short if parity odd (PF=0).
 *  Inverse of 0x7a; the taken/not-taken arms are simply swapped. */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10399
10400
/** Opcode 0x7c - jl/jnge Jb: jump short if less (signed, SF!=OF). */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10418
10419
/** Opcode 0x7d - jnl/jge Jb: jump short if greater or equal (signed, SF==OF).
 *  Inverse of 0x7c; the taken/not-taken arms are simply swapped. */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10437
10438
/** Opcode 0x7e - jle/jng Jb: jump short if less or equal (signed,
 *  ZF=1 or SF!=OF). */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10456
10457
/** Opcode 0x7f - jg/jnle Jb: jump short if greater (signed, ZF=0 and SF==OF).
 *  Inverse of 0x7e; the taken/not-taken arms are simply swapped. */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10475
10476
/** Opcode 0x80 - group 1: add/or/adc/sbb/and/sub/xor/cmp Eb,Ib.
 *  The ModRM reg field selects which binary operation to apply; the actual
 *  worker comes from the g_apIemImplGrp1 table. */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Eb_Ib,  "or  Eb,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,                2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        /* CMP is the only group member without a locked variant; it only
           reads the destination, so map it read-only. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* 1 = size of the byte immediate still to be fetched after ModRM. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1);
        /* LOCK is only permitted when the operation has a locked worker. */
        if (pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING();
        else
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IF_MC_DUMMY_REMOVE_ME
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10546
10547
/**
 * @opcode 0x81 - Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Ev,Iz.
 *
 * The ModR/M reg field selects one of the eight group-1 binary operations,
 * applied to a 16/32/64-bit register or memory destination with a word/dword
 * immediate.  In 64-bit operand mode the immediate is a sign-extended dword.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The reg field selects the operation; the IEMOP_MNEMONIC invocations are
       for statistics/logging only and have no execution side effects. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
    }
    /* Worker function table for the selected group-1 operation. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                /* Only read/write access (and thus a LOCK prefix) is meaningful
                   when the operation writes back; CMP has no locked worker. */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP, TEST */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The '2' tells the effective-address calculation that two more
                   opcode bytes (the imm16) follow the ModR/M operand bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes clear bits 63:32 of the full register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP, TEST */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Four immediate bytes (imm32) still pending after the operand bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                /* Iz in 64-bit operand mode is a dword sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* NOTE(review): unlike the 16/32-bit paths, the source assignment
                   happens after the done-decoding check here.  u64Imm was already
                   fetched above, so this looks harmless — confirm it is
                   intentional and acceptable to the IEM_MC backends. */
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
10735
10736
/**
 * @opcode 0x82 - Alias of opcode 0x80 (Group 1 Eb,Ib); invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
10743
10744
/**
 * @opcode 0x83 - Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Ev,Ib.
 *
 * Like opcode 0x81, but the immediate is a single byte sign-extended to the
 * effective operand size before the operation is applied.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The reg field selects the operation (mnemonic macros: stats/logging only). */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
    }
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                /* The (int8_t) cast performs the Ib sign-extension. */
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes clear bits 63:32 of the full register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        /* pfnLockedU16 is a sufficient probe for all sizes: the locked worker
           pointers are either all set (RMW ops) or all NULL (CMP). */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* One immediate byte still pending after the operand bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
10927
10928
/**
 * @opcode 0x84 - TEST Eb,Gb.
 *
 * Forwards to the common byte rm,r8 binary-operator worker with the TEST
 * implementation table (updates flags, discards the AND result).
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after TEST. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
10936
10937
/**
 * @opcode 0x85 - TEST Ev,Gv.
 *
 * Forwards to the common word/dword/qword rm,rv binary-operator worker with
 * the TEST implementation table (updates flags, discards the AND result).
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after TEST. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
10945
10946
/**
 * @opcode 0x86 - XCHG Eb,Gb.
 *
 * Exchanges a byte general register with a byte register or memory operand.
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        /* Swap the two registers via a pair of temporaries. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* NOTE(review): no IEMOP_HLP_DONE_DECODING* call in this path — XCHG
           with a memory operand accepts a LOCK prefix (locking is implicit),
           but verify the omission of the decoding-done marker is intentional. */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10994
10995
/**
 * @opcode 0x87 - XCHG Ev,Gv.
 *
 * Exchanges a word/dword/qword general register with a register or memory
 * operand of the effective operand size.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                /* Swap the two registers via a pair of temporaries. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                /* Note: IEM_MC_STORE_GREG_U32 clears the high dword, so no
                   explicit IEM_MC_CLEAR_HIGH_GREG_U64 is present here —
                   presumably by design of the store macro; confirm. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* XCHG with a memory operand locks implicitly; the memory half is
           mapped read/write and exchanged via the assembly worker. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* The register half was written by reference, so the high
                   dword must be cleared explicitly here. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11117
11118
/**
 * @opcode 0x88 - MOV Eb,Gb.
 *
 * Stores a byte general register into a byte register or memory operand.
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
11158
11159
/**
 * @opcode 0x89 - MOV Ev,Gv.
 *
 * Stores a word/dword/qword general register into a register or memory
 * operand of the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
11249
11250
/**
 * @opcode 0x8a - MOV Gb,Eb.
 *
 * Loads a byte general register from a byte register or memory operand.
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11288
11289
/**
 * @opcode 0x8b - MOV Gv,Ev.
 *
 * Loads a word/dword/qword general register from a register or memory
 * operand of the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
11379
11380
/**
 * @opcode 0x63
 *
 * ARPL Ew,Gw outside 64-bit mode; in 64-bit mode this byte is MOVSXD Gv,Ev
 * (with REX.W) or behaves as a plain MOV Gv,Ev (without REX.W).
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);      /* legacy/compat mode: ARPL */
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);       /* 63h without REX.W: plain MOV */
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);        /* REX.W 63h: MOVSXD */
}
11390
11391
/**
 * @opcode 0x8c - MOV Ev,Sw.
 *
 * Stores a segment register selector into a general register (operand-size
 * zero-extended) or into memory (always a 16-bit store).
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    /* Reg values above GS do not name a segment register -> #UD. */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if ( iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg); /* selector zero-extended to 32 bits */
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg); /* selector zero-extended to 64 bits */
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory. The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11465
11466
11467
11468
/**
 * @opcode 0x8d - lea Gv,M.
 *
 * Stores the effective address of the memory operand in Gv.  No memory is
 * accessed; the register form (mod=3) is invalid and raises \#UD.  The
 * address is truncated to 16 bits, zero extended to 32 bits, or stored
 * unchanged, according to the effective operand size.
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc); /* chop the address to the operand size */
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7); /* not reached - all operand sizes handled above */
}
11515
11516
/**
 * @opcode 0x8e - mov Sw,Ev.
 *
 * Loads a segment register from a 16-bit GPR or memory word.  CS cannot be
 * the destination and reg values above GS are invalid (\#UD).  The actual
 * load is deferred to a C implementation helper.
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS /* mov to CS is invalid */
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        /* Segment register loads are handled by a C implementation helper. */
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11571
11572
/**
 * @opcode 0x8f /0 - pop Ev.
 *
 * Interpreter-only implementation; see the comments below on why the
 * effective address must be calculated with an adjusted rSP.
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations. Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler. It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev? Ignorning it for
     * now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations. This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice. This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    PCPUMCTX pCtx = IEM_GET_CTX(pVCpu);
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    /* NOTE(review): the trailing 2/4/8 matches the pop size; presumably the
       rSP bias applied during the calculation - confirm in iemOpHlpCalcRmEffAddrEx. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp; /* work on a copy; only committed on full success */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
11667
11668
/**
 * @opcode 0x8f - group 1A.
 * /0 is pop Ev; /1 thru /7 would be the AMD XOP prefix, which is not
 * decoded yet and currently raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp1A)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
    /** @todo XOP decoding. */
    IEMOP_MNEMONIC(xop_amd, "3-byte-xop");
    return IEMOP_RAISE_INVALID_OPCODE();
}
11681
11682
11683/**
11684 * Common 'xchg reg,rAX' helper.
11685 */
11686FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
11687{
11688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11689
11690 iReg |= pVCpu->iem.s.uRexB;
11691 switch (pVCpu->iem.s.enmEffOpSize)
11692 {
11693 case IEMMODE_16BIT:
11694 IEM_MC_BEGIN(0, 2);
11695 IEM_MC_LOCAL(uint16_t, u16Tmp1);
11696 IEM_MC_LOCAL(uint16_t, u16Tmp2);
11697 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
11698 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
11699 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
11700 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
11701 IEM_MC_ADVANCE_RIP();
11702 IEM_MC_END();
11703 return VINF_SUCCESS;
11704
11705 case IEMMODE_32BIT:
11706 IEM_MC_BEGIN(0, 2);
11707 IEM_MC_LOCAL(uint32_t, u32Tmp1);
11708 IEM_MC_LOCAL(uint32_t, u32Tmp2);
11709 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
11710 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
11711 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
11712 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
11713 IEM_MC_ADVANCE_RIP();
11714 IEM_MC_END();
11715 return VINF_SUCCESS;
11716
11717 case IEMMODE_64BIT:
11718 IEM_MC_BEGIN(0, 2);
11719 IEM_MC_LOCAL(uint64_t, u64Tmp1);
11720 IEM_MC_LOCAL(uint64_t, u64Tmp2);
11721 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
11722 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
11723 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
11724 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
11725 IEM_MC_ADVANCE_RIP();
11726 IEM_MC_END();
11727 return VINF_SUCCESS;
11728
11729 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11730 }
11731}
11732
11733
11734/** Opcode 0x90. */
11735FNIEMOP_DEF(iemOp_nop)
11736{
11737 /* R8/R8D and RAX/EAX can be exchanged. */
11738 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
11739 {
11740 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
11741 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
11742 }
11743
11744 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
11745 IEMOP_MNEMONIC(pause, "pause");
11746 else
11747 IEMOP_MNEMONIC(nop, "nop");
11748 IEM_MC_BEGIN(0, 0);
11749 IEM_MC_ADVANCE_RIP();
11750 IEM_MC_END();
11751 return VINF_SUCCESS;
11752}
11753
11754
/** Opcode 0x91 - xchg rCX,rAX (REX.B is merged in by the common worker). */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
11761
11762
/** Opcode 0x92 - xchg rDX,rAX (REX.B is merged in by the common worker). */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
11769
11770
/** Opcode 0x93 - xchg rBX,rAX (REX.B is merged in by the common worker). */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
11777
11778
11779/** Opcode 0x94. */
11780FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
11781{
11782 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
11783 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
11784}
11785
11786
/** Opcode 0x95 - xchg rBP,rAX (REX.B is merged in by the common worker). */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
11793
11794
/** Opcode 0x96 - xchg rSI,rAX (REX.B is merged in by the common worker). */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
11801
11802
/** Opcode 0x97 - xchg rDI,rAX (REX.B is merged in by the common worker). */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
11809
11810
/**
 * @opcode 0x98 - cbw/cwde/cdqe.
 * Sign-extends within rAX according to the effective operand size
 * (AL -> AX, AX -> EAX, EAX -> RAX), implemented here by testing the sign
 * bit and OR'ing/AND'ing the upper half accordingly.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11856
11857
/**
 * @opcode 0x99 - cwd/cdq/cqo.
 * Sign-extends rAX into rDX: rDX is set to all ones or all zeros depending
 * on the sign bit of the operand-sized rAX value.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11903
11904
/**
 * @opcode 0x9a - call Ap.
 * Direct far call with an immediate selector:offset pair; invalid in
 * 64-bit mode.  The heavy lifting is deferred to the far-call C
 * implementation.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
11921
11922
/**
 * @opcode 0x9b - wait (aka fwait).
 * Checks for device-not-available and pending FPU exceptions before
 * advancing RIP; otherwise a no-op.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11936
11937
11938/** Opcode 0x9c. */
11939FNIEMOP_DEF(iemOp_pushf_Fv)
11940{
11941 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11942 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11943 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
11944}
11945
11946
11947/** Opcode 0x9d. */
11948FNIEMOP_DEF(iemOp_popf_Fv)
11949{
11950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11951 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11952 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
11953}
11954
11955
/**
 * @opcode 0x9e - sahf.
 * Stores AH into the low EFLAGS byte, keeping only SF/ZF/AF/PF/CF and
 * forcing reserved bit 1.  In 64-bit mode this requires the LAHF/SAHF
 * CPUID feature, else \#UD.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00)); /* keep the upper 24 bits of EFLAGS */
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1); /* reserved bit 1 is always set */
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11978
11979
/**
 * @opcode 0x9f - lahf.
 * Loads the low EFLAGS byte into AH.  In 64-bit mode this requires the
 * LAHF/SAHF CPUID feature, else \#UD.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11996
11997
11998/**
11999 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
12000 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
12001 * prefixes. Will return on failures.
12002 * @param a_GCPtrMemOff The variable to store the offset in.
12003 */
12004#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
12005 do \
12006 { \
12007 switch (pVCpu->iem.s.enmEffAddrMode) \
12008 { \
12009 case IEMMODE_16BIT: \
12010 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
12011 break; \
12012 case IEMMODE_32BIT: \
12013 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
12014 break; \
12015 case IEMMODE_64BIT: \
12016 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
12017 break; \
12018 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
12019 } \
12020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12021 } while (0)
12022
12023/** Opcode 0xa0. */
12024FNIEMOP_DEF(iemOp_mov_Al_Ob)
12025{
12026 /*
12027 * Get the offset and fend of lock prefixes.
12028 */
12029 RTGCPTR GCPtrMemOff;
12030 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12031
12032 /*
12033 * Fetch AL.
12034 */
12035 IEM_MC_BEGIN(0,1);
12036 IEM_MC_LOCAL(uint8_t, u8Tmp);
12037 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12038 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
12039 IEM_MC_ADVANCE_RIP();
12040 IEM_MC_END();
12041 return VINF_SUCCESS;
12042}
12043
12044
/** Opcode 0xa1 - mov rAX,Ov (moffs load into AX/EAX/RAX per operand size). */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12090
12091
12092/** Opcode 0xa2. */
12093FNIEMOP_DEF(iemOp_mov_Ob_AL)
12094{
12095 /*
12096 * Get the offset and fend of lock prefixes.
12097 */
12098 RTGCPTR GCPtrMemOff;
12099 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12100
12101 /*
12102 * Store AL.
12103 */
12104 IEM_MC_BEGIN(0,1);
12105 IEM_MC_LOCAL(uint8_t, u8Tmp);
12106 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
12107 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
12108 IEM_MC_ADVANCE_RIP();
12109 IEM_MC_END();
12110 return VINF_SUCCESS;
12111}
12112
12113
12114/** Opcode 0xa3. */
12115FNIEMOP_DEF(iemOp_mov_Ov_rAX)
12116{
12117 /*
12118 * Get the offset and fend of lock prefixes.
12119 */
12120 RTGCPTR GCPtrMemOff;
12121 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12122
12123 /*
12124 * Store rAX.
12125 */
12126 switch (pVCpu->iem.s.enmEffOpSize)
12127 {
12128 case IEMMODE_16BIT:
12129 IEM_MC_BEGIN(0,1);
12130 IEM_MC_LOCAL(uint16_t, u16Tmp);
12131 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
12132 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
12133 IEM_MC_ADVANCE_RIP();
12134 IEM_MC_END();
12135 return VINF_SUCCESS;
12136
12137 case IEMMODE_32BIT:
12138 IEM_MC_BEGIN(0,1);
12139 IEM_MC_LOCAL(uint32_t, u32Tmp);
12140 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
12141 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
12142 IEM_MC_ADVANCE_RIP();
12143 IEM_MC_END();
12144 return VINF_SUCCESS;
12145
12146 case IEMMODE_64BIT:
12147 IEM_MC_BEGIN(0,1);
12148 IEM_MC_LOCAL(uint64_t, u64Tmp);
12149 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
12150 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
12151 IEM_MC_ADVANCE_RIP();
12152 IEM_MC_END();
12153 return VINF_SUCCESS;
12154
12155 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12156 }
12157}
12158
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Loads ValBits of data from iEffSeg:[xSI], stores it at ES:[xDI], then
 * advances (DF=0) or retreats (DF=1) both index registers by ValBits/8. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
12177
/**
 * @opcode 0xa4 - movsb Xb,Yb.
 * A repeat prefix (F2 or F3, both treated as rep here) routes to the C
 * implementation; otherwise one byte is moved inline via IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12211
12212
/**
 * @opcode 0xa5 - movsw/movsd/movsq Xv,Yv.
 * A repeat prefix routes to the C implementations (one per operand/address
 * size combo); otherwise a single element is moved inline via IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every case in the inner switch above returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12295
12296#undef IEM_MOVS_CASE
12297
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 * Compares iEffSeg:[xSI] against ES:[xDI] via the cmp assembly worker
 * (updates EFLAGS; nothing is written to memory), then advances (DF=0) or
 * retreats (DF=1) both index registers by ValBits/8. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 3); \
    IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
    IEM_MC_REF_LOCAL(puValue1, uValue1); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \

/**
 * @opcode 0xa6 - cmpsb Xb,Yb.
 * F3 (repe) and F2 (repne) prefixes route to their respective C
 * implementations; otherwise a single byte compare is done inline via
 * IEM_CMPS_CASE.
 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
12370
12371
12372/** Opcode 0xa7. */
12373FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
12374{
12375 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12376
12377 /*
12378 * Use the C implementation if a repeat prefix is encountered.
12379 */
12380 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12381 {
12382 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
12383 switch (pVCpu->iem.s.enmEffOpSize)
12384 {
12385 case IEMMODE_16BIT:
12386 switch (pVCpu->iem.s.enmEffAddrMode)
12387 {
12388 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
12389 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
12390 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
12391 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12392 }
12393 break;
12394 case IEMMODE_32BIT:
12395 switch (pVCpu->iem.s.enmEffAddrMode)
12396 {
12397 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
12398 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
12399 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
12400 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12401 }
12402 case IEMMODE_64BIT:
12403 switch (pVCpu->iem.s.enmEffAddrMode)
12404 {
12405 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
12406 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
12407 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
12408 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12409 }
12410 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12411 }
12412 }
12413
12414 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12415 {
12416 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
12417 switch (pVCpu->iem.s.enmEffOpSize)
12418 {
12419 case IEMMODE_16BIT:
12420 switch (pVCpu->iem.s.enmEffAddrMode)
12421 {
12422 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
12423 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
12424 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
12425 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12426 }
12427 break;
12428 case IEMMODE_32BIT:
12429 switch (pVCpu->iem.s.enmEffAddrMode)
12430 {
12431 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
12432 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
12433 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
12434 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12435 }
12436 case IEMMODE_64BIT:
12437 switch (pVCpu->iem.s.enmEffAddrMode)
12438 {
12439 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
12440 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
12441 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
12442 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12443 }
12444 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12445 }
12446 }
12447
12448 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
12449
12450 /*
12451 * Annoying double switch here.
12452 * Using ugly macro for implementing the cases, sharing it with cmpsb.
12453 */
12454 switch (pVCpu->iem.s.enmEffOpSize)
12455 {
12456 case IEMMODE_16BIT:
12457 switch (pVCpu->iem.s.enmEffAddrMode)
12458 {
12459 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
12460 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
12461 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
12462 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12463 }
12464 break;
12465
12466 case IEMMODE_32BIT:
12467 switch (pVCpu->iem.s.enmEffAddrMode)
12468 {
12469 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
12470 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
12471 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
12472 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12473 }
12474 break;
12475
12476 case IEMMODE_64BIT:
12477 switch (pVCpu->iem.s.enmEffAddrMode)
12478 {
12479 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12480 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
12481 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
12482 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12483 }
12484 break;
12485 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12486 }
12487 return VINF_SUCCESS;
12488
12489}
12490
12491#undef IEM_CMPS_CASE
12492
/** Opcode 0xa8 - test AL,Ib.
 * ANDs AL with the immediate byte and updates EFLAGS only; AL is not
 * written.  AF is architecturally undefined after TEST, hence the
 * verification-mode exclusion below. */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
12500
12501
/** Opcode 0xa9 - test rAX,Iz.
 * ANDs AX/EAX/RAX with the operand-size immediate and updates EFLAGS only;
 * the accumulator is not written.  AF is architecturally undefined. */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
12509
12510
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the non-REP STOS body for one value-size/address-size combination:
 * stores xAX (low ValBits) to ES:xDI and then steps xDI by ValBits/8 -
 * downwards when EFLAGS.DF is set, upwards otherwise.
 *
 * Note: the trailing line-continuation backslash after IEM_MC_END() has been
 * dropped; it silently pulled the following source line into the macro body,
 * unlike the sibling IEM_LODS_CASE / IEM_SCAS_CASE macros. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
12526
/** Opcode 0xaa - stosb Yb,AL.
 * Stores AL to ES:xDI and advances xDI per EFLAGS.DF.  With a REP/REPNE
 * prefix the whole loop is deferred to the C implementation. */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        /* One C worker per effective address size; each returns directly. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12560
12561
12562/** Opcode 0xab. */
12563FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
12564{
12565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12566
12567 /*
12568 * Use the C implementation if a repeat prefix is encountered.
12569 */
12570 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12571 {
12572 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
12573 switch (pVCpu->iem.s.enmEffOpSize)
12574 {
12575 case IEMMODE_16BIT:
12576 switch (pVCpu->iem.s.enmEffAddrMode)
12577 {
12578 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
12579 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
12580 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
12581 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12582 }
12583 break;
12584 case IEMMODE_32BIT:
12585 switch (pVCpu->iem.s.enmEffAddrMode)
12586 {
12587 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
12588 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
12589 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
12590 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12591 }
12592 case IEMMODE_64BIT:
12593 switch (pVCpu->iem.s.enmEffAddrMode)
12594 {
12595 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
12596 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
12597 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
12598 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12599 }
12600 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12601 }
12602 }
12603 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
12604
12605 /*
12606 * Annoying double switch here.
12607 * Using ugly macro for implementing the cases, sharing it with stosb.
12608 */
12609 switch (pVCpu->iem.s.enmEffOpSize)
12610 {
12611 case IEMMODE_16BIT:
12612 switch (pVCpu->iem.s.enmEffAddrMode)
12613 {
12614 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
12615 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
12616 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
12617 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12618 }
12619 break;
12620
12621 case IEMMODE_32BIT:
12622 switch (pVCpu->iem.s.enmEffAddrMode)
12623 {
12624 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
12625 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
12626 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
12627 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12628 }
12629 break;
12630
12631 case IEMMODE_64BIT:
12632 switch (pVCpu->iem.s.enmEffAddrMode)
12633 {
12634 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12635 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
12636 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
12637 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12638 }
12639 break;
12640 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12641 }
12642 return VINF_SUCCESS;
12643}
12644
12645#undef IEM_STOS_CASE
12646
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the non-REP LODS body for one value-size/address-size combination:
 * loads ValBits bits from iEffSeg:xSI into xAX, then steps xSI by ValBits/8
 * - downwards when EFLAGS.DF is set, upwards otherwise. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
12662
/** Opcode 0xac - lodsb AL,Xb.
 * Loads a byte from iEffSeg:xSI into AL and advances xSI per EFLAGS.DF.
 * With a REP/REPNE prefix the loop is deferred to the C implementation. */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12696
12697
12698/** Opcode 0xad. */
12699FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
12700{
12701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12702
12703 /*
12704 * Use the C implementation if a repeat prefix is encountered.
12705 */
12706 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12707 {
12708 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
12709 switch (pVCpu->iem.s.enmEffOpSize)
12710 {
12711 case IEMMODE_16BIT:
12712 switch (pVCpu->iem.s.enmEffAddrMode)
12713 {
12714 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
12715 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
12716 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
12717 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12718 }
12719 break;
12720 case IEMMODE_32BIT:
12721 switch (pVCpu->iem.s.enmEffAddrMode)
12722 {
12723 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
12724 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
12725 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
12726 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12727 }
12728 case IEMMODE_64BIT:
12729 switch (pVCpu->iem.s.enmEffAddrMode)
12730 {
12731 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
12732 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
12733 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
12734 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12735 }
12736 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12737 }
12738 }
12739 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
12740
12741 /*
12742 * Annoying double switch here.
12743 * Using ugly macro for implementing the cases, sharing it with lodsb.
12744 */
12745 switch (pVCpu->iem.s.enmEffOpSize)
12746 {
12747 case IEMMODE_16BIT:
12748 switch (pVCpu->iem.s.enmEffAddrMode)
12749 {
12750 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
12751 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
12752 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
12753 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12754 }
12755 break;
12756
12757 case IEMMODE_32BIT:
12758 switch (pVCpu->iem.s.enmEffAddrMode)
12759 {
12760 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
12761 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
12762 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
12763 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12764 }
12765 break;
12766
12767 case IEMMODE_64BIT:
12768 switch (pVCpu->iem.s.enmEffAddrMode)
12769 {
12770 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12771 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
12772 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
12773 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12774 }
12775 break;
12776 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12777 }
12778 return VINF_SUCCESS;
12779}
12780
12781#undef IEM_LODS_CASE
12782
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the non-REP SCAS body for one value-size/address-size combination:
 * compares xAX (low ValBits) against ES:xDI using the CMP assembly worker
 * (EFLAGS updated, no register written), then steps xDI by ValBits/8 -
 * downwards when EFLAGS.DF is set, upwards otherwise. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
12804
12805/** Opcode 0xae. */
12806FNIEMOP_DEF(iemOp_scasb_AL_Xb)
12807{
12808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12809
12810 /*
12811 * Use the C implementation if a repeat prefix is encountered.
12812 */
12813 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12814 {
12815 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
12816 switch (pVCpu->iem.s.enmEffAddrMode)
12817 {
12818 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
12819 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
12820 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
12821 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12822 }
12823 }
12824 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12825 {
12826 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
12827 switch (pVCpu->iem.s.enmEffAddrMode)
12828 {
12829 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
12830 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
12831 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
12832 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12833 }
12834 }
12835 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
12836
12837 /*
12838 * Sharing case implementation with stos[wdq] below.
12839 */
12840 switch (pVCpu->iem.s.enmEffAddrMode)
12841 {
12842 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
12843 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
12844 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
12845 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12846 }
12847 return VINF_SUCCESS;
12848}
12849
12850
12851/** Opcode 0xaf. */
12852FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
12853{
12854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12855
12856 /*
12857 * Use the C implementation if a repeat prefix is encountered.
12858 */
12859 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12860 {
12861 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
12862 switch (pVCpu->iem.s.enmEffOpSize)
12863 {
12864 case IEMMODE_16BIT:
12865 switch (pVCpu->iem.s.enmEffAddrMode)
12866 {
12867 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
12868 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
12869 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
12870 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12871 }
12872 break;
12873 case IEMMODE_32BIT:
12874 switch (pVCpu->iem.s.enmEffAddrMode)
12875 {
12876 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
12877 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
12878 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
12879 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12880 }
12881 case IEMMODE_64BIT:
12882 switch (pVCpu->iem.s.enmEffAddrMode)
12883 {
12884 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
12885 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
12886 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
12887 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12888 }
12889 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12890 }
12891 }
12892 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12893 {
12894 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
12895 switch (pVCpu->iem.s.enmEffOpSize)
12896 {
12897 case IEMMODE_16BIT:
12898 switch (pVCpu->iem.s.enmEffAddrMode)
12899 {
12900 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
12901 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
12902 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
12903 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12904 }
12905 break;
12906 case IEMMODE_32BIT:
12907 switch (pVCpu->iem.s.enmEffAddrMode)
12908 {
12909 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
12910 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
12911 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
12912 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12913 }
12914 case IEMMODE_64BIT:
12915 switch (pVCpu->iem.s.enmEffAddrMode)
12916 {
12917 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
12918 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
12919 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
12920 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12921 }
12922 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12923 }
12924 }
12925 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
12926
12927 /*
12928 * Annoying double switch here.
12929 * Using ugly macro for implementing the cases, sharing it with scasb.
12930 */
12931 switch (pVCpu->iem.s.enmEffOpSize)
12932 {
12933 case IEMMODE_16BIT:
12934 switch (pVCpu->iem.s.enmEffAddrMode)
12935 {
12936 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
12937 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
12938 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
12939 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12940 }
12941 break;
12942
12943 case IEMMODE_32BIT:
12944 switch (pVCpu->iem.s.enmEffAddrMode)
12945 {
12946 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
12947 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
12948 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
12949 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12950 }
12951 break;
12952
12953 case IEMMODE_64BIT:
12954 switch (pVCpu->iem.s.enmEffAddrMode)
12955 {
12956 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12957 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
12958 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
12959 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12960 }
12961 break;
12962 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12963 }
12964 return VINF_SUCCESS;
12965}
12966
12967#undef IEM_SCAS_CASE
12968
12969/**
12970 * Common 'mov r8, imm8' helper.
12971 */
12972FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
12973{
12974 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12976
12977 IEM_MC_BEGIN(0, 1);
12978 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
12979 IEM_MC_STORE_GREG_U8(iReg, u8Value);
12980 IEM_MC_ADVANCE_RIP();
12981 IEM_MC_END();
12982
12983 return VINF_SUCCESS;
12984}
12985
12986
/** Opcode 0xb0 - mov AL,Ib (REX.B selects R8L). */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
12993
12994
/** Opcode 0xb1 - mov CL,Ib (REX.B selects R9L). */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
13001
13002
/** Opcode 0xb2 - mov DL,Ib (REX.B selects R10L). */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
13009
13010
/** Opcode 0xb3 - mov BL,Ib (REX.B selects R11L). */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
13017
13018
/** Opcode 0xb4 - mov AH,Ib.
 * Register index 4 encodes AH without a REX prefix and SPL/R12L with one;
 * presumably the AH-vs-SPL mapping is handled inside the GREG_U8 accessors
 * - TODO(review): confirm. */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
13025
13026
/** Opcode 0xb5 - mov CH,Ib (register index 5: CH without REX, BPL/R13L with). */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
13033
13034
/** Opcode 0xb6 - mov DH,Ib (register index 6: DH without REX, SIL/R14L with). */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
13041
13042
/** Opcode 0xb7 - mov BH,Ib (register index 7: BH without REX, DIL/R15L with). */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
13049
13050
13051/**
13052 * Common 'mov regX,immX' helper.
13053 */
13054FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
13055{
13056 switch (pVCpu->iem.s.enmEffOpSize)
13057 {
13058 case IEMMODE_16BIT:
13059 {
13060 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13062
13063 IEM_MC_BEGIN(0, 1);
13064 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
13065 IEM_MC_STORE_GREG_U16(iReg, u16Value);
13066 IEM_MC_ADVANCE_RIP();
13067 IEM_MC_END();
13068 break;
13069 }
13070
13071 case IEMMODE_32BIT:
13072 {
13073 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13075
13076 IEM_MC_BEGIN(0, 1);
13077 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
13078 IEM_MC_STORE_GREG_U32(iReg, u32Value);
13079 IEM_MC_ADVANCE_RIP();
13080 IEM_MC_END();
13081 break;
13082 }
13083 case IEMMODE_64BIT:
13084 {
13085 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
13086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13087
13088 IEM_MC_BEGIN(0, 1);
13089 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
13090 IEM_MC_STORE_GREG_U64(iReg, u64Value);
13091 IEM_MC_ADVANCE_RIP();
13092 IEM_MC_END();
13093 break;
13094 }
13095 }
13096
13097 return VINF_SUCCESS;
13098}
13099
13100
/** Opcode 0xb8 - mov rAX,Iv (REX.B selects r8). */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
13107
13108
/** Opcode 0xb9 - mov rCX,Iv (REX.B selects r9). */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
13115
13116
/** Opcode 0xba - mov rDX,Iv (REX.B selects r10). */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
13123
13124
/** Opcode 0xbb - mov rBX,Iv (REX.B selects r11). */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
13131
13132
/** Opcode 0xbc - mov rSP,Iv (REX.B selects r12). */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
13139
13140
/** Opcode 0xbd - mov rBP,Iv (REX.B selects r13). */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
13147
13148
/** Opcode 0xbe - mov rSI,Iv (REX.B selects r14). */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
13155
13156
/** Opcode 0xbf - mov rDI,Iv (REX.B selects r15). */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
13163
13164
/** Opcode 0xc0 - Group 2, Eb,Ib (186+).
 * Byte-sized shift/rotate with an immediate count.  The ModRM reg field
 * selects the operation (0=rol 1=ror 2=rcl 3=rcr 4=shl 5=shr 7=sar; /6 is
 * an invalid encoding).  OF and AF are architecturally undefined for shift
 * counts other than 1, hence the verification-mode exclusion. */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The trailing 1 tells the effective-address calculation that one
           immediate byte (the shift count) still follows the ModRM bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13224
13225
/** Opcode 0xc1 - Group 2, Ev,Ib (186+).
 * Word/dword/qword shift/rotate with an immediate count.  The ModRM reg
 * field selects the operation (0=rol 1=ror 2=rcl 3=rcr 4=shl 5=shr 7=sar;
 * /6 is an invalid encoding); the effective operand size selects the 16/32/
 * 64-bit worker.  OF and AF are architecturally undefined for shift counts
 * other than 1. */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the high half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The trailing 1 accounts for the immediate count byte that
                   still follows the ModRM bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13365
13366
/** Opcode 0xc2 - near return, releasing Iw bytes of stack arguments. */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    /* Fetch the 16-bit pop count before finishing decode. */
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near return defaults to 64-bit operand size in long mode */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
13376
13377
/** Opcode 0xc3 - near return without stack argument release (pop count 0). */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near return defaults to 64-bit operand size in long mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Same CIMPL worker as 0xc2, just with a zero byte count to pop. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
}
13386
13387
/** Opcode 0xc4 - LES Gv,Mp in legacy/compat mode, 2-byte VEX prefix otherwise. */
FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* 64-bit mode, or MOD=3 in any mode, means this is the VEX2 prefix form. */
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(vex2_prefix, "2-byte-vex");
        /* The LES instruction is invalid 64-bit mode. In legacy and
           compatability mode it is invalid with MOD=3.
           The use as a VEX prefix is made possible by assigning the inverted
           REX.R to the top MOD bit, and the top bit in the inverted register
           specifier to the bottom MOD bit, thereby effectively limiting 32-bit
           to accessing registers 0..7 in this VEX form. */
        /** @todo VEX: Just use new tables for it. */
        return IEMOP_RAISE_INVALID_OPCODE(); /* VEX decoding not implemented yet; raise #UD */
    }
    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    /* Plain LES: load far pointer into ES:reg via the common worker. */
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
13408
13409
/** Opcode 0xc5 - LDS Gv,Mp in legacy/compat mode, 3-byte VEX prefix otherwise. */
FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
{
    /* The LDS instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            /* Plain LDS: load far pointer into DS:reg via the common worker. */
            IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
            return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
        }
        /* MOD=3 outside 64-bit mode means VEX3, which requires protected mode. */
        IEMOP_HLP_NO_REAL_OR_V86_MODE();
    }

    IEMOP_MNEMONIC(vex3_prefix, "3-byte-vex");
    /** @todo Test when exctly the VEX conformance checks kick in during
     *        instruction decoding and fetching (using \#PF). */
    /* Consume the two VEX payload bytes and the real opcode byte so the
       instruction length is right even though we raise #UD below. */
    uint8_t bVex1;   IEM_OPCODE_GET_NEXT_U8(&bVex1);
    uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
#if 0 /* will make sense of this next week... */
    /* NOTE(review): IEM_OP_PRF_REPZ appears twice in this (disabled) mask and
       the condition is incomplete - revisit before enabling. */
    if (   !(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
        &&
        )
    {

    }
#endif

    /** @todo VEX: Just use new tables for it. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
13447
13448
/** Opcode 0xc6 - Group 11: mov Eb,Ib (reg field must be 0, all else is #UD). */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* Effective address first; the trailing 1 reserves one immediate byte
           still to be fetched after the ModRM bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13481
13482
/** Opcode 0xc7 - Group 11: mov Ev,Iz (reg field must be 0, all else is #UD). */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* 64-bit form takes a 32-bit immediate sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* The last argument reserves the immediate bytes still to come. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Still 4 immediate bytes in the 64-bit case (sign-extended). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13568
13569
13570
13571
/** Opcode 0xc8 - ENTER Iw,Ib: build a stack frame of Iw bytes at nesting level Ib. */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186(); /* ENTER was introduced with the 80186. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
13583
13584
/** Opcode 0xc9 - LEAVE: tear down the stack frame set up by ENTER. */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186(); /* LEAVE was introduced with the 80186. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
13594
13595
/** Opcode 0xca - far return, releasing Iw bytes of stack arguments. */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
13605
13606
/** Opcode 0xcb - far return without stack argument release (pop count 0). */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
13615
13616
/** Opcode 0xcc - INT3 breakpoint: raises \#BP, flagged as the dedicated instruction. */
FNIEMOP_DEF(iemOp_int_3)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
13623
13624
/** Opcode 0xcd - INT Ib: software interrupt through vector Ib. */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Even "int 3" encoded as CD 03 is not treated as the BP instruction. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
13632
13633
/** Opcode 0xce - INTO: raise \#OF if the overflow flag is set (invalid in 64-bit mode). */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();
    /* NOTE(review): unlike int3/int Ib this does not call
       IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX - confirm the lock prefix
       handling here is intentional. */

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false, 1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13647
13648
/** Opcode 0xcf - IRET: interrupt return with the current effective operand size. */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
13656
13657
/** Opcode 0xd0 - Group 2 rotates/shifts on a byte operand with count 1. */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The reg field of ModRM selects the operation; /6 is undefined. */
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are architecturally undefined for some of these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the destination read/write, apply, then commit. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13713
13714
13715
/** Opcode 0xd1 - Group 2 rotates/shifts on a word/dword/qword operand with count 1. */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The reg field of ModRM selects the operation; /6 is undefined. */
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are architecturally undefined for some of these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map the destination read/write, apply, then commit. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13847
13848
/** Opcode 0xd2 - Group 2 rotates/shifts on a byte operand with the count in CL. */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The reg field of ModRM selects the operation; /6 is undefined. */
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF and AF are architecturally undefined for some of these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,    0);
        IEM_MC_ARG(uint8_t,     cShiftArg, 1);
        IEM_MC_ARG(uint32_t *,  pEFlags,   2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        /* The shift count comes from CL (low byte of rCX). */
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the destination read/write, apply, then commit. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13906
13907
/** Opcode 0xd3 - Group 2 rotates/shifts on a word/dword/qword operand with the count in CL. */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The reg field of ModRM selects the operation; /6 is undefined. */
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are architecturally undefined for some of these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                /* The shift count comes from CL (low byte of rCX). */
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map the destination read/write, apply, then commit. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14045
/** Opcode 0xd4 - AAM Ib: ASCII adjust AX after multiply; Ib is the divisor. */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* AAM is invalid in 64-bit mode. */
    /* A zero divisor raises #DE, just like a DIV by zero. */
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
14057
14058
/** Opcode 0xd5 - AAD Ib: ASCII adjust AX before division; Ib is the multiplier. */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* AAD is invalid in 64-bit mode. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
14068
14069
14070/** Opcode 0xd6. */
14071FNIEMOP_DEF(iemOp_salc)
14072{
14073 IEMOP_MNEMONIC(salc, "salc");
14074 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
14075 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
14076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14077 IEMOP_HLP_NO_64BIT();
14078
14079 IEM_MC_BEGIN(0, 0);
14080 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
14081 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
14082 } IEM_MC_ELSE() {
14083 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
14084 } IEM_MC_ENDIF();
14085 IEM_MC_ADVANCE_RIP();
14086 IEM_MC_END();
14087 return VINF_SUCCESS;
14088}
14089
14090
/** Opcode 0xd7 - XLAT: AL = [seg:rBX + AL], width of rBX per effective address size. */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* The table index is zero-extended AL added to BX/EBX/RBX; the fetch
       width of the base register depends on the effective address mode. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14137
14138
14139/**
14140 * Common worker for FPU instructions working on ST0 and STn, and storing the
14141 * result in ST0.
14142 *
14143 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14144 */
14145FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
14146{
14147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14148
14149 IEM_MC_BEGIN(3, 1);
14150 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14151 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14152 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14153 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14154
14155 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14156 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14157 IEM_MC_PREPARE_FPU_USAGE();
14158 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14159 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
14160 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14161 IEM_MC_ELSE()
14162 IEM_MC_FPU_STACK_UNDERFLOW(0);
14163 IEM_MC_ENDIF();
14164 IEM_MC_ADVANCE_RIP();
14165
14166 IEM_MC_END();
14167 return VINF_SUCCESS;
14168}
14169
14170
14171/**
14172 * Common worker for FPU instructions working on ST0 and STn, and only affecting
14173 * flags.
14174 *
14175 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14176 */
14177FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14178{
14179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14180
14181 IEM_MC_BEGIN(3, 1);
14182 IEM_MC_LOCAL(uint16_t, u16Fsw);
14183 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14184 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14185 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14186
14187 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14188 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14189 IEM_MC_PREPARE_FPU_USAGE();
14190 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14191 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14192 IEM_MC_UPDATE_FSW(u16Fsw);
14193 IEM_MC_ELSE()
14194 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
14195 IEM_MC_ENDIF();
14196 IEM_MC_ADVANCE_RIP();
14197
14198 IEM_MC_END();
14199 return VINF_SUCCESS;
14200}
14201
14202
14203/**
14204 * Common worker for FPU instructions working on ST0 and STn, only affecting
14205 * flags, and popping when done.
14206 *
14207 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14208 */
14209FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14210{
14211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14212
14213 IEM_MC_BEGIN(3, 1);
14214 IEM_MC_LOCAL(uint16_t, u16Fsw);
14215 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14216 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14217 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14218
14219 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14220 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14221 IEM_MC_PREPARE_FPU_USAGE();
14222 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14223 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14224 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
14225 IEM_MC_ELSE()
14226 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
14227 IEM_MC_ENDIF();
14228 IEM_MC_ADVANCE_RIP();
14229
14230 IEM_MC_END();
14231 return VINF_SUCCESS;
14232}
14233
14234
/** Opcode 0xd8 11/0 - fadd st0,stN: ST0 += STn, result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
14241
14242
14243/** Opcode 0xd8 11/1. */
14244FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
14245{
14246 IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
14247 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
14248}
14249
14250
14251/** Opcode 0xd8 11/2. */
14252FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
14253{
14254 IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
14255 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
14256}
14257
14258
14259/** Opcode 0xd8 11/3. */
14260FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
14261{
14262 IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
14263 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
14264}
14265
14266
14267/** Opcode 0xd8 11/4. */
14268FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
14269{
14270 IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
14271 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
14272}
14273
14274
14275/** Opcode 0xd8 11/5. */
14276FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
14277{
14278 IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
14279 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
14280}
14281
14282
14283/** Opcode 0xd8 11/6. */
14284FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
14285{
14286 IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
14287 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
14288}
14289
14290
14291/** Opcode 0xd8 11/7. */
14292FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
14293{
14294 IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
14295 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
14296}
14297
14298
14299/**
14300 * Common worker for FPU instructions working on ST0 and an m32r, and storing
14301 * the result in ST0.
14302 *
14303 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14304 */
14305FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
14306{
14307 IEM_MC_BEGIN(3, 3);
14308 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14309 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14310 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
14311 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14312 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14313 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
14314
14315 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14317
14318 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14319 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14320 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14321
14322 IEM_MC_PREPARE_FPU_USAGE();
14323 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14324 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
14325 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14326 IEM_MC_ELSE()
14327 IEM_MC_FPU_STACK_UNDERFLOW(0);
14328 IEM_MC_ENDIF();
14329 IEM_MC_ADVANCE_RIP();
14330
14331 IEM_MC_END();
14332 return VINF_SUCCESS;
14333}
14334
14335
/** Opcode 0xd8 !11/0. FADD ST(0),m32real - memory form. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}


/** Opcode 0xd8 !11/1. FMUL ST(0),m32real - memory form. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
14350
14351
/** Opcode 0xd8 !11/2. FCOM ST(0),m32real - compares ST0 against a 32-bit
 *  real from memory; only FSW is updated, nothing is stored. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* The _WITH_MEM_OP variant also records FPU data pointer info. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14384
14385
/** Opcode 0xd8 !11/3. FCOMP ST(0),m32real - like FCOM m32r but pops the
 *  register stack afterwards (both on success and on underflow). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14418
14419
/** Opcode 0xd8 !11/4. FSUB ST(0),m32real - memory form. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5. FSUBR ST(0),m32real - reversed operands. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6. FDIV ST(0),m32real - memory form. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7. FDIVR ST(0),m32real - reversed operands. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
14450
14451
/** Opcode 0xd8. First x87 escape opcode: dispatches on the ModR/M byte to
 *  the register forms (mod == 3, ST0/STn) or memory forms (m32real).
 *  All dispatch targets return, so every path out of the switches returns. */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of 0xd8 + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register operand forms: ST(0) op ST(i). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory operand forms: ST(0) op m32real. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14489
14490
/** Opcode 0xd9 /0 mem32real
 * Loads a 32-bit real from memory, converts it to 80-bit, and pushes it
 * onto the FPU register stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push is only possible when register 7 (the new top after push) is free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14523
14524
/** Opcode 0xd9 !11/2 mem32real
 * Stores ST0 to memory as a 32-bit real.  On stack underflow, a negative
 * QNaN is written instead when FCW.IM (invalid-op masked) is set. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so memory faults are taken
       before any FPU state is modified. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14559
14560
/** Opcode 0xd9 !11/3
 * FSTP m32real - identical to FST m32real (see iemOp_fst_m32r) except
 * the register stack is popped afterwards. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Underflow: write default QNaN if #IA is masked, then pop anyway. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14595
14596
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte - loads the FPU environment; the heavy lifting
 * (14 vs 28 byte layout depending on operand size) is in iemCImpl_fldenv. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
14614
14615
14616/** Opcode 0xd9 !11/5 */
14617FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
14618{
14619 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
14620 IEM_MC_BEGIN(1, 1);
14621 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14622 IEM_MC_ARG(uint16_t, u16Fsw, 0);
14623 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14625 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14626 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14627 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14628 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
14629 IEM_MC_END();
14630 return VINF_SUCCESS;
14631}
14632
14633
/** Opcode 0xd9 !11/6
 * FNSTENV m14/28byte - stores the FPU environment without checking for
 * pending exceptions first (no-wait form).
 * NOTE(review): the IEMOP_MNEMONIC stats tag reads 'fstenv' although this is
 * the no-wait FNSTENV encoding - confirm whether the tag is intentional. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
14651
14652
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte - stores the current FPU control word to memory.
 * NOTE(review): IEM_MC_BEGIN(2, 0) while this block has 0 args / 2 locals -
 * the counts look swapped; verify whether the macro arguments matter here. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
14670
14671
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP - does nothing except update the FPU opcode/IP bookkeeping and
 * raise the usual device-not-available / pending-FPU-exception checks. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
14689
14690
/** Opcode 0xd9 11/0 stN
 * FLD ST(i) - duplicates ST(i) by pushing its value onto the stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        /* Wrap the source value in a result with a zero FSW and push it. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
14718
14719
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i) - exchanges ST(0) and ST(i).  The underflow case (either
 * register empty) is delegated to iemCImpl_fxch_underflow. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* ST(i)'s old value (with C1 set) becomes ST(0), and vice versa. */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
14750
14751
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i) - copies ST(0) to ST(i) and pops.  The iDstReg == 0 case is
 * special-cased because "fstp st0,st0" is commonly used as 'ffreep st0'. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST(0) itself: just pop (no value movement needed). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* General case: copy ST(0) into ST(i), then pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
14798
14799
14800/**
14801 * Common worker for FPU instructions working on ST0 and replaces it with the
14802 * result, i.e. unary operators.
14803 *
14804 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14805 */
14806FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
14807{
14808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14809
14810 IEM_MC_BEGIN(2, 1);
14811 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14812 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14813 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14814
14815 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14816 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14817 IEM_MC_PREPARE_FPU_USAGE();
14818 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14819 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
14820 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14821 IEM_MC_ELSE()
14822 IEM_MC_FPU_STACK_UNDERFLOW(0);
14823 IEM_MC_ENDIF();
14824 IEM_MC_ADVANCE_RIP();
14825
14826 IEM_MC_END();
14827 return VINF_SUCCESS;
14828}
14829
14830
/** Opcode 0xd9 0xe0. FCHS - negate ST(0). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1. FABS - absolute value of ST(0). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
14845
14846
14847/**
14848 * Common worker for FPU instructions working on ST0 and only returns FSW.
14849 *
14850 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14851 */
14852FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
14853{
14854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14855
14856 IEM_MC_BEGIN(2, 1);
14857 IEM_MC_LOCAL(uint16_t, u16Fsw);
14858 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14859 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14860
14861 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14862 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14863 IEM_MC_PREPARE_FPU_USAGE();
14864 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14865 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
14866 IEM_MC_UPDATE_FSW(u16Fsw);
14867 IEM_MC_ELSE()
14868 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
14869 IEM_MC_ENDIF();
14870 IEM_MC_ADVANCE_RIP();
14871
14872 IEM_MC_END();
14873 return VINF_SUCCESS;
14874}
14875
14876
/** Opcode 0xd9 0xe4. FTST - compare ST(0) against 0.0, updating FSW only. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}


/** Opcode 0xd9 0xe5. FXAM - classify ST(0), updating FSW only. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
14891
14892
14893/**
14894 * Common worker for FPU instructions pushing a constant onto the FPU stack.
14895 *
14896 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14897 */
14898FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
14899{
14900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14901
14902 IEM_MC_BEGIN(1, 1);
14903 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14904 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14905
14906 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14907 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14908 IEM_MC_PREPARE_FPU_USAGE();
14909 IEM_MC_IF_FPUREG_IS_EMPTY(7)
14910 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
14911 IEM_MC_PUSH_FPU_RESULT(FpuRes);
14912 IEM_MC_ELSE()
14913 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
14914 IEM_MC_ENDIF();
14915 IEM_MC_ADVANCE_RIP();
14916
14917 IEM_MC_END();
14918 return VINF_SUCCESS;
14919}
14920
14921
/** Opcode 0xd9 0xe8. FLD1 - push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9. FLDL2T - push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea. FLDL2E - push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb. FLDPI - push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec. FLDLG2 - push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed. FLDLN2 - push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee. FLDZ - push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}


/** Opcode 0xd9 0xf0. F2XM1 - replace ST(0) with 2^ST(0) - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
14982
14983
14984/**
14985 * Common worker for FPU instructions working on STn and ST0, storing the result
14986 * in STn, and popping the stack unless IE, DE or ZE was raised.
14987 *
14988 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14989 */
14990FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
14991{
14992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14993
14994 IEM_MC_BEGIN(3, 1);
14995 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14996 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14997 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14998 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14999
15000 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15001 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15002
15003 IEM_MC_PREPARE_FPU_USAGE();
15004 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
15005 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
15006 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
15007 IEM_MC_ELSE()
15008 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
15009 IEM_MC_ENDIF();
15010 IEM_MC_ADVANCE_RIP();
15011
15012 IEM_MC_END();
15013 return VINF_SUCCESS;
15014}
15015
15016
/** Opcode 0xd9 0xf1. FYL2X - ST(1) := ST(1) * log2(ST(0)), then pop.
 *  Note: bRm is hardcoded to 1 so the worker operates on ST(1). */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
15023
15024
15025/**
15026 * Common worker for FPU instructions working on ST0 and having two outputs, one
15027 * replacing ST0 and one pushed onto the stack.
15028 *
15029 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15030 */
15031FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
15032{
15033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15034
15035 IEM_MC_BEGIN(2, 1);
15036 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
15037 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
15038 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
15039
15040 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15041 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15042 IEM_MC_PREPARE_FPU_USAGE();
15043 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15044 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
15045 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
15046 IEM_MC_ELSE()
15047 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
15048 IEM_MC_ENDIF();
15049 IEM_MC_ADVANCE_RIP();
15050
15051 IEM_MC_END();
15052 return VINF_SUCCESS;
15053}
15054
15055
/** Opcode 0xd9 0xf2. FPTAN - partial tangent of ST(0); replaces ST(0) and
 *  pushes a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}


/** Opcode 0xd9 0xf3. FPATAN - arctangent: result into ST(1), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4. FXTRACT - split ST(0) into exponent and significand. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5. FPREM1 - IEEE partial remainder of ST(0)/ST(1). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
15086
15087
/** Opcode 0xd9 0xf6.
 * FDECSTP - decrement the FPU stack top pointer; no registers or tags are
 * modified, only FSW.TOP (and C0/C2/C3 cleared via the constant FSW). */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15110
15111
/** Opcode 0xd9 0xf7.
 * FINCSTP - increment the FPU stack top pointer; mirror image of FDECSTP. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15134
15135
/** Opcode 0xd9 0xf8. FPREM - partial remainder (truncating) of ST(0)/ST(1). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}


/** Opcode 0xd9 0xf9. FYL2XP1 - ST(1) := ST(1) * log2(ST(0)+1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}


/** Opcode 0xd9 0xfa. FSQRT - square root of ST(0). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}


/** Opcode 0xd9 0xfb. FSINCOS - sine into ST(0), cosine pushed on top. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}


/** Opcode 0xd9 0xfc. FRNDINT - round ST(0) to integer per FCW.RC. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}


/** Opcode 0xd9 0xfd. FSCALE - scale ST(0) by ST(1). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}


/** Opcode 0xd9 0xfe. FSIN - sine of ST(0). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}


/** Opcode 0xd9 0xff. FCOS - cosine of ST(0). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
15198
15199
/** Used by iemOp_EscF1.
 * Dispatch table for 0xd9 with ModR/M bytes 0xe0-0xff (reg forms 4-7),
 * indexed by (bRm - 0xe0). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
15236
15237
/**
 * Opcode 0xd9 - FPU escape F1 decoder.
 *
 * Records the FPU opcode word, then dispatches on the ModR/M byte: the
 * register form (mod == 3) handles FLD/FXCH/FNOP and the 0xe0..0xff range via
 * g_apfnEscF1_E0toFF; the memory form handles m32r loads/stores and the
 * environment/control-word instructions.
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* FOP is the low 3 bits of the escape opcode combined with the ModR/M byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* reg >= 4 implies bRm >= 0xe0, so the table index is in range. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r,  bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv,    bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw,     bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv,   bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw,    bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15280
15281
/** Opcode 0xda 11/0 - FCMOVB: copy ST(i) to ST0 if EFLAGS.CF is set. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15308
15309
/** Opcode 0xda 11/1 - FCMOVE: copy ST(i) to ST0 if EFLAGS.ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15336
15337
/** Opcode 0xda 11/2 - FCMOVBE: copy ST(i) to ST0 if EFLAGS.CF or EFLAGS.ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15364
15365
/** Opcode 0xda 11/3 - FCMOVU: copy ST(i) to ST0 if EFLAGS.PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15392
15393
15394/**
15395 * Common worker for FPU instructions working on ST0 and STn, only affecting
15396 * flags, and popping twice when done.
15397 *
15398 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15399 */
15400FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
15401{
15402 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15403
15404 IEM_MC_BEGIN(3, 1);
15405 IEM_MC_LOCAL(uint16_t, u16Fsw);
15406 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15407 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15408 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
15409
15410 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15411 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15412
15413 IEM_MC_PREPARE_FPU_USAGE();
15414 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
15415 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
15416 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
15417 IEM_MC_ELSE()
15418 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
15419 IEM_MC_ENDIF();
15420 IEM_MC_ADVANCE_RIP();
15421
15422 IEM_MC_END();
15423 return VINF_SUCCESS;
15424}
15425
15426
/** Opcode 0xda 0xe9 - FUCOMPP: unordered compare ST0 with ST1, then pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
15433
15434
15435/**
15436 * Common worker for FPU instructions working on ST0 and an m32i, and storing
15437 * the result in ST0.
15438 *
15439 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15440 */
15441FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
15442{
15443 IEM_MC_BEGIN(3, 3);
15444 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15445 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15446 IEM_MC_LOCAL(int32_t, i32Val2);
15447 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15448 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15449 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
15450
15451 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15453
15454 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15455 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15456 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15457
15458 IEM_MC_PREPARE_FPU_USAGE();
15459 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15460 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
15461 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
15462 IEM_MC_ELSE()
15463 IEM_MC_FPU_STACK_UNDERFLOW(0);
15464 IEM_MC_ENDIF();
15465 IEM_MC_ADVANCE_RIP();
15466
15467 IEM_MC_END();
15468 return VINF_SUCCESS;
15469}
15470
15471
/** Opcode 0xda !11/0 - FIADD: ST0 = ST0 + m32i. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
15478
15479
/** Opcode 0xda !11/1 - FIMUL: ST0 = ST0 * m32i. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
15486
15487
/** Opcode 0xda !11/2 - FICOM: compare ST0 with m32i, setting FSW condition codes. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    /* Effective address must be calculated before decoding is declared done. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* No destination register (flags only), hence the UINT8_MAX. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15520
15521
/** Opcode 0xda !11/3 - FICOMP: compare ST0 with m32i, set FSW condition codes, then pop. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    /* Effective address must be calculated before decoding is declared done. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* Same as FICOM, but popping the stack afterwards. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* No destination register (flags only), hence the UINT8_MAX. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15554
15555
/** Opcode 0xda !11/4 - FISUB: ST0 = ST0 - m32i. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
15562
15563
/** Opcode 0xda !11/5 - FISUBR: ST0 = m32i - ST0 (reversed subtract). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
15570
15571
/** Opcode 0xda !11/6 - FIDIV: ST0 = ST0 / m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
15578
15579
/** Opcode 0xda !11/7 - FIDIVR: ST0 = m32i / ST0 (reversed divide). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
15586
15587
/**
 * Opcode 0xda - FPU escape F2 decoder.
 *
 * Register form (mod == 3): FCMOVcc and FUCOMPP (0xe9 only).
 * Memory form: integer arithmetic/compare on a 32-bit memory operand.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* FOP is the low 3 bits of the escape opcode combined with the ModR/M byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15627
15628
/** Opcode 0xdb !11/0 - FILD: load m32i, convert to r80, and push onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val,  i32Val, 1);

    /* Effective address must be calculated before decoding is declared done. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires the register that will become the new top (ST7) to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15660
15661
/** Opcode 0xdb !11/1 - FISTTP: store ST0 to m32i with truncation, then pop (SSE3). */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    /* Effective address must be calculated before decoding is declared done. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing so the store can be committed or discarded
       depending on the FSW the assembly worker returns. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with invalid-op masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15696
15697
/** Opcode 0xdb !11/2 - FIST: store ST0 to m32i (rounded per FCW), stack unchanged. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    /* Effective address must be calculated before decoding is declared done. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing so the store can be committed or discarded
       depending on the FSW the assembly worker returns. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with invalid-op masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15732
15733
/** Opcode 0xdb !11/3 - FISTP: store ST0 to m32i (rounded per FCW), then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    /* Effective address must be calculated before decoding is declared done. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing so the store can be committed or discarded
       depending on the FSW the assembly worker returns. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with invalid-op masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15768
15769
/** Opcode 0xdb !11/5 - FLD m80r: load an 80-bit real from memory and push it. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U,            r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U,  pr80Val,    r80Val, 1);

    /* Effective address must be calculated before decoding is declared done. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires the register that will become the new top (ST7) to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15801
15802
/** Opcode 0xdb !11/7 - FSTP m80r: store ST0 to an 80-bit real in memory, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U,             pr80Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    /* Effective address must be calculated before decoding is declared done. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing so the store can be committed or discarded
       depending on the FSW the assembly worker returns. */
    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with invalid-op masked, store the QNaN indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15837
15838
/** Opcode 0xdb 11/0 - FCMOVNB: copy ST(i) to ST0 if EFLAGS.CF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15865
15866
/** Opcode 0xdb 11/1 - FCMOVNE: copy ST(i) to ST0 if EFLAGS.ZF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15893
15894
/** Opcode 0xdb 11/2 - FCMOVNBE: copy ST(i) to ST0 if both EFLAGS.CF and EFLAGS.ZF are clear. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15921
15922
/** Opcode 0xdb 11/3 - FCMOVNU: copy ST(i) to ST0 if EFLAGS.PF is clear (not unordered).
 * @note The identifier says "fcmovnnu" with a doubled 'n' — presumably a historical
 *       typo for FCMOVNU; renaming would touch the EscF3 dispatch, so kept as-is. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15949
15950
/** Opcode 0xdb 0xe0 - FNENI: 8087 enable-interrupts; treated as a no-op on later FPUs. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15962
15963
/** Opcode 0xdb 0xe1 - FNDISI: 8087 disable-interrupts; treated as a no-op on later FPUs. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15975
15976
/** Opcode 0xdb 0xe2 - FNCLEX: clear FPU exception flags without checking for pending ones. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15991
15992
/** Opcode 0xdb 0xe3 - FNINIT: initialize the FPU without checking for pending exceptions. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to the C implementation; fCheckXcpts=false is the FNINIT (vs FINIT) part. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
16000
16001
/** Opcode 0xdb 0xe4 - FNSETPM: 80287 set-protected-mode; treated as a no-op on later FPUs. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16013
16014
/** Opcode 0xdb 0xe5 - FRSTPM: 80287XL return-to-real-mode; raises \#UD here since
 * newer CPUs do not implement it (the no-op variant is kept disabled below). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
16030
16031
/** Opcode 0xdb 11/5 - FUCOMI: unordered compare ST0 with ST(i), setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
16038
16039
/** Opcode 0xdb 11/6 - FCOMI: ordered compare ST0 with ST(i), setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
16046
16047
/**
 * Opcode 0xdb - FPU escape F3 decoder.
 *
 * Register form (mod == 3): FCMOVNcc, the administrative 0xe0..0xe7 group
 * (FNENI/FNDISI/FNCLEX/FNINIT/FNSETPM/FRSTPM), and FUCOMI/FCOMI.
 * Memory form: m32i load/store and m80r load/store.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* FOP is the low 3 bits of the escape opcode combined with the ModR/M byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                switch (bRm)
                {
                    case 0xe0:  return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1:  return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2:  return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3:  return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4:  return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5:  return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6:  return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7:  return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16097
16098
16099/**
16100 * Common worker for FPU instructions working on STn and ST0, and storing the
16101 * result in STn unless IE, DE or ZE was raised.
16102 *
16103 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16104 */
16105FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
16106{
16107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16108
16109 IEM_MC_BEGIN(3, 1);
16110 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16111 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16112 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16113 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
16114
16115 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16116 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16117
16118 IEM_MC_PREPARE_FPU_USAGE();
16119 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
16120 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
16121 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
16122 IEM_MC_ELSE()
16123 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
16124 IEM_MC_ENDIF();
16125 IEM_MC_ADVANCE_RIP();
16126
16127 IEM_MC_END();
16128 return VINF_SUCCESS;
16129}
16130
16131
/** Opcode 0xdc 11/0 - FADD: ST(i) = ST(i) + ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
16138
16139
/** Opcode 0xdc 11/1 - FMUL: ST(i) = ST(i) * ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
16146
16147
/** Opcode 0xdc 11/4 - FSUBR: ST(i) = ST0 - ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
16154
16155
/** Opcode 0xdc 11/5 - FSUB: ST(i) = ST(i) - ST0. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
16162
16163
/** Opcode 0xdc 11/6 - FDIVR: ST(i) = ST0 / ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
16170
16171
/** Opcode 0xdc 11/7 - FDIV: ST(i) = ST(i) / ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
16178
16179
16180/**
16181 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
16182 * memory operand, and storing the result in ST0.
16183 *
16184 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16185 */
16186FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
16187{
16188 IEM_MC_BEGIN(3, 3);
16189 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16190 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16191 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
16192 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16193 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
16194 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
16195
16196 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16198 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16199 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16200
16201 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16202 IEM_MC_PREPARE_FPU_USAGE();
16203 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
16204 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
16205 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16206 IEM_MC_ELSE()
16207 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16208 IEM_MC_ENDIF();
16209 IEM_MC_ADVANCE_RIP();
16210
16211 IEM_MC_END();
16212 return VINF_SUCCESS;
16213}
16214
16215
/** Opcode 0xdc !11/0. FADD ST(0),m64r - defers to the ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
16222
16223
/** Opcode 0xdc !11/1. FMUL ST(0),m64r - defers to the ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
16230
16231
/** Opcode 0xdc !11/2. FCOM ST(0),m64r - compare only, result goes to FSW. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* ST(0) empty: record underflow; UINT8_MAX = no destination register. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16264
16265
/** Opcode 0xdc !11/3. FCOMP ST(0),m64r - same as FCOM but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* ST(0) empty: record underflow (UINT8_MAX = no destination) and still pop. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16298
16299
/** Opcode 0xdc !11/4. FSUB ST(0),m64r - defers to the ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
16306
16307
/** Opcode 0xdc !11/5. FSUBR ST(0),m64r - defers to the ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
16314
16315
/** Opcode 0xdc !11/6. FDIV ST(0),m64r - defers to the ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
16322
16323
/** Opcode 0xdc !11/7. FDIVR ST(0),m64r - defers to the ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
16330
16331
/** Opcode 0xdc. FPU escape group 4: dispatches on the ModR/M reg field. */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP) for later FSTENV/FSAVE reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: ST(i) operands. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: 64-bit real operands. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16368
16369
/** Opcode 0xdd !11/0. FLD m64r - push a 64-bit real onto the FPU stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs ST(7) (the register below TOP) to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16401
16402
/** Opcode 0xdd !11/1. FISTTP m64i - store ST(0) truncated to int64 and pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before touching FPU state so #PF is raised up front. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16437
16438
/** Opcode 0xdd !11/2. FST m64r - store ST(0) as a 64-bit real (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before touching FPU state so #PF is raised up front. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, store the real-indefinite QNaN. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16473
16474
16475
16476
/** Opcode 0xdd !11/3. FSTP m64r - store ST(0) as a 64-bit real and pop. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before touching FPU state so #PF is raised up front. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, store the real-indefinite QNaN; still pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16511
16512
/** Opcode 0xdd !11/4. FRSTOR m94/108byte - defers to a C implementation. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
16530
16531
/** Opcode 0xdd !11/6. FNSAVE m94/108byte - defers to a C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
16550
/** Opcode 0xdd !11/7. FNSTSW m16 - store the FPU status word to memory. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
16575
16576
/** Opcode 0xdd 11/0. FFREE ST(i) - mark the register as empty. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16598
16599
/** Opcode 0xdd 11/2. FST ST(i) - copy ST(0) into ST(i). */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Wrap the ST(0) value in a result with a zero FSW delta, then store it. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16624
16625
/** Opcode 0xdd 11/4. FUCOM ST(0),ST(i) - unordered compare, no store, no pop. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
16632
16633
/** Opcode 0xdd 11/5. FUCOMP ST(0),ST(i) - unordered compare, then pop. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
16640
16641
/** Opcode 0xdd. FPU escape group 5: dispatches on the ModR/M reg field. */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP) for later FSTENV/FSAVE reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: ST(i) operands. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: 64-bit real/int operands and FPU state blocks. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16678
16679
/** Opcode 0xde 11/0. FADDP ST(i),ST(0) - add and pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
16686
16687
/** Opcode 0xde 11/1. FMULP ST(i),ST(0) - multiply and pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
16694
16695
/** Opcode 0xde 0xd9. FCOMPP - compare ST(0) with ST(1), then pop twice. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
}
16702
16703
/** Opcode 0xde 11/4. FSUBRP ST(i),ST(0) - reverse subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
16710
16711
/** Opcode 0xde 11/5. FSUBP ST(i),ST(0) - subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
16718
16719
/** Opcode 0xde 11/6. FDIVRP ST(i),ST(0) - reverse divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
16726
16727
/** Opcode 0xde 11/7. FDIVP ST(i),ST(0) - divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
16734
16735
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the memory operand first; the FPU state is only touched afterwards. */
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16771
16772
/** Opcode 0xde !11/0. FIADD m16i - defers to the ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
16779
16780
/** Opcode 0xde !11/1. FIMUL m16i - defers to the ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
16787
16788
/** Opcode 0xde !11/2. FICOM ST(0),m16i - integer compare, result goes to FSW. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* ST(0) empty: record underflow; UINT8_MAX = no destination register. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16821
16822
/** Opcode 0xde !11/3. FICOMP ST(0),m16i - same as FICOM but pops ST(0). */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* ST(0) empty: record underflow (UINT8_MAX = no destination); still pop. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16855
16856
/** Opcode 0xde !11/4. FISUB m16i - defers to the ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
16863
16864
/** Opcode 0xde !11/5. FISUBR m16i - defers to the ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
16871
16872
/** Opcode 0xde !11/6. FIDIV m16i - defers to the ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
16879
16880
/** Opcode 0xde !11/7. FIDIVR m16i - defers to the ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
16887
16888
/** Opcode 0xde. FPU escape group 6: dispatches on the ModR/M reg field. */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP) for later FSTENV/FSAVE reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: ST(i) operands. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9) /* Only DE D9 (FCOMPP) is valid in this slot. */
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: 16-bit integer operands. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16927
16928
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    /* The 'pop' half: incrementing TOP is what distinguishes FFREEP from FFREE. */
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16950
16951
/** Opcode 0xdf 0xe0. FNSTSW AX - store the FPU status word in AX. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16968
16969
/** Opcode 0xdf 11/5. FUCOMIP ST(0),ST(i) - compare to EFLAGS and pop.
 * NOTE(review): this uses iemAImpl_fcomi_r80_by_r80, the same worker as FCOMIP
 * below; FUCOMI(P) differs from FCOMI(P) only in #IA behavior for QNaN
 * operands - confirm this sharing is intentional. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
16976
16977
/** Opcode 0xdf 11/6. FCOMIP ST(0),ST(i) - compare to EFLAGS and pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
16984
16985
/** Opcode 0xdf !11/0. FILD m16i - push a 16-bit integer onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs ST(7) (the register below TOP) to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17017
17018
/** Opcode 0xdf !11/1. FISTTP m16i - store ST(0) truncated to int16 and pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before touching FPU state so #PF is raised up front. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17053
17054
/** Opcode 0xdf !11/2. FIST m16i - store ST(0) as int16 (rounded, no pop). */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before touching FPU state so #PF is raised up front. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17089
17090
/** Opcode 0xdf !11/3.
 * FISTP m16i: store ST(0) as a 16-bit signed integer and pop the stack
 * (same as iemOp_fist_m16i except for the *_THEN_POP FSW updates). */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable before touching FPU state. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: store integer indefinite if FCW.IM is set. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17125
17126
/** Opcode 0xdf !11/4.
 * @note Not implemented yet; FNIEMOP_STUB_1 supplies a stub body.
 *       (FBLD — presumably the 80-bit packed BCD load; confirm when implementing.) */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
17129
17130
/** Opcode 0xdf !11/5.
 * FILD m64i: load a 64-bit signed integer from memory and push it onto the
 * FPU stack as an 80-bit real. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires register 7 (relative to the current top) to be free,
       otherwise it is a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17162
17163
/** Opcode 0xdf !11/6.
 * @note Not implemented yet; FNIEMOP_STUB_1 supplies a stub body.
 *       (FBSTP — presumably the 80-bit packed BCD store-and-pop; confirm when implementing.) */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
17166
17167
/** Opcode 0xdf !11/7.
 * FISTP m64i: store ST(0) as a 64-bit signed integer and pop the stack. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable before touching FPU state. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: store the 64-bit integer indefinite if FCW.IM is set. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17202
17203
/** Opcode 0xdf.
 * FPU escape group 7: dispatches on the ModR/M mod and reg fields.
 * Register forms (mod=3) and memory forms use separate sub-tables. */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register operand forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* only df e0 (fnstsw ax) is valid in /4 */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory operand forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17241
17242
/** Opcode 0xe0.
 * LOOPNE Jb: decrement CX/ECX/RCX (selected by the effective address size)
 * and take the short branch when the counter is non-zero AND ZF is clear. */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17289
17290
/** Opcode 0xe1.
 * LOOPE Jb: decrement CX/ECX/RCX (selected by the effective address size)
 * and take the short branch when the counter is non-zero AND ZF is set. */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17337
17338
/** Opcode 0xe2.
 * LOOP Jb: decrement CX/ECX/RCX (selected by the effective address size)
 * and take the short branch while the counter is non-zero. */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */

    /* Fast path: when the displacement equals minus the instruction length the
       branch targets the instruction itself ('loop $'), which would only spin
       the counter down to zero.  Clear the counter and advance instead. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17412
17413
/** Opcode 0xe3.
 * JCXZ/JECXZ/JRCXZ Jb: short branch when CX/ECX/RCX (selected by the
 * effective address size) is zero.  Note the inverted structure: the
 * non-zero path falls through, the zero path takes the jump. */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17457
17458
/** Opcode 0xe4.
 * IN AL,Ib: byte input from the immediate port; deferred to iemCImpl_in. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
}
17467
17468
/** Opcode 0xe5.
 * IN eAX,Ib: word/dword input from the immediate port; the access size
 * (2 or 4 bytes) follows the effective operand size. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
17477
17478
/** Opcode 0xe6.
 * OUT Ib,AL: byte output to the immediate port; deferred to iemCImpl_out. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
17487
17488
/** Opcode 0xe7.
 * OUT Ib,eAX: word/dword output to the immediate port; the access size
 * (2 or 4 bytes) follows the effective operand size. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
17497
17498
/** Opcode 0xe8.
 * CALL Jv: near relative call; the immediate width follows the effective
 * operand size (64-bit mode reads an imm32 sign-extended to 64 bits). */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* Sign-extended 32-bit displacement. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17527
17528
/** Opcode 0xe9.
 * JMP Jv: near relative jump.  The 64-bit case shares the 32-bit path since
 * both read a signed 32-bit displacement. */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17558
17559
/** Opcode 0xea.
 * JMP Ap: direct far jump with an inline sel:offset pointer.
 * Invalid in 64-bit mode (IEMOP_HLP_NO_64BIT). */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
17576
17577
/** Opcode 0xeb.
 * JMP Jb: short relative jump (8-bit signed displacement). */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
17591
17592
/** Opcode 0xec.
 * IN AL,DX: byte input from the port in DX; deferred to iemCImpl_in_eAX_DX. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
17600
17601
/** Opcode 0xed.
 * IN eAX,DX: word/dword input from the port in DX; access size follows the
 * effective operand size.
 * @note NOTE(review): the identifier lacks the 'in_' prefix its siblings use
 *       (iemOp_in_AL_DX etc.); renaming would require touching the opcode
 *       table elsewhere in this file, so it is left as-is. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
17609
17610
/** Opcode 0xee.
 * OUT DX,AL: byte output to the port in DX; deferred to iemCImpl_out_DX_eAX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
17618
17619
/** Opcode 0xef.
 * OUT DX,eAX: word/dword output to the port in DX; access size follows the
 * effective operand size. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
17627
17628
/** Opcode 0xf0.
 * LOCK prefix: records the prefix flag and restarts decoding with the next
 * opcode byte via the one-byte opcode map. */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
17638
17639
/** Opcode 0xf1.
 * INT1/ICEBP: raises #DB via the common int worker (not flagged as a
 * breakpoint instruction). */
FNIEMOP_DEF(iemOp_int_1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
17648
17649
/** Opcode 0xf2.
 * REPNE/REPNZ prefix: records the prefix and restarts decoding with the
 * next opcode byte. */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
17665
17666
/** Opcode 0xf3.
 * REPE/REPZ prefix: records the prefix and restarts decoding with the
 * next opcode byte. */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
17682
17683
/** Opcode 0xf4.
 * HLT: deferred entirely to the C implementation. */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
17690
17691
/** Opcode 0xf5.
 * CMC: complement the carry flag. */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17703
17704
/**
 * Common implementation of 'inc/dec/not/neg Eb'.
 *
 * For the register form the assembly worker operates directly on the GPR.
 * For the memory form the operand is mapped read-write and the locked
 * worker is selected when a LOCK prefix is present.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation (normal + locked workers).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* LOCK prefix selects the atomic worker variant. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
17748
17749
/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * Register forms are delegated to iemOpCommonUnaryGReg; memory forms are
 * handled here for each effective operand size, mapping the operand
 * read-write and selecting the locked worker when a LOCK prefix is present.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation (normal + locked workers).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory we do here. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17828
17829
/** Opcode 0xf6 /0.
 * TEST Eb,Ib: AND without writeback, only EFLAGS are updated.  The memory
 * operand is therefore mapped read-only (IEM_ACCESS_DATA_R). */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* Effective address first (the trailing byte is the immediate),
           then fetch the immediate which completes the decoding. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
17876
17877
/** Opcode 0xf7 /0.
 * TEST Ev,Iz: AND without writeback, only EFLAGS are updated.  The memory
 * operand is mapped read-only; in 64-bit mode the immediate is an imm32
 * sign-extended to 64 bits. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Effective address first; the cbImm argument (2) accounts
                   for the trailing immediate. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm is 4: the immediate is imm32 even with REX.W. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
18017
18018
/** Opcode 0xf6 /4, /5, /6 and /7.
 * Common worker for the byte-sized MUL/IMUL/DIV/IDIV forms: AX is the
 * implicit accumulator/result register.  A non-zero status from the
 * assembly worker raises \#DE (divide error). */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc != 0 means the worker detected a divide error. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
18070
18071
18072/** Opcode 0xf7 /4, /5, /6 and /7. */
18073FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
18074{
18075 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18076
18077 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18078 {
18079 /* register access */
18080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18081 switch (pVCpu->iem.s.enmEffOpSize)
18082 {
18083 case IEMMODE_16BIT:
18084 {
18085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18086 IEM_MC_BEGIN(4, 1);
18087 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18088 IEM_MC_ARG(uint16_t *, pu16DX, 1);
18089 IEM_MC_ARG(uint16_t, u16Value, 2);
18090 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18091 IEM_MC_LOCAL(int32_t, rc);
18092
18093 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18094 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18095 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
18096 IEM_MC_REF_EFLAGS(pEFlags);
18097 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
18098 IEM_MC_IF_LOCAL_IS_Z(rc) {
18099 IEM_MC_ADVANCE_RIP();
18100 } IEM_MC_ELSE() {
18101 IEM_MC_RAISE_DIVIDE_ERROR();
18102 } IEM_MC_ENDIF();
18103
18104 IEM_MC_END();
18105 return VINF_SUCCESS;
18106 }
18107
18108 case IEMMODE_32BIT:
18109 {
18110 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18111 IEM_MC_BEGIN(4, 1);
18112 IEM_MC_ARG(uint32_t *, pu32AX, 0);
18113 IEM_MC_ARG(uint32_t *, pu32DX, 1);
18114 IEM_MC_ARG(uint32_t, u32Value, 2);
18115 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18116 IEM_MC_LOCAL(int32_t, rc);
18117
18118 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18119 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
18120 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
18121 IEM_MC_REF_EFLAGS(pEFlags);
18122 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
18123 IEM_MC_IF_LOCAL_IS_Z(rc) {
18124 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
18125 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
18126 IEM_MC_ADVANCE_RIP();
18127 } IEM_MC_ELSE() {
18128 IEM_MC_RAISE_DIVIDE_ERROR();
18129 } IEM_MC_ENDIF();
18130
18131 IEM_MC_END();
18132 return VINF_SUCCESS;
18133 }
18134
18135 case IEMMODE_64BIT:
18136 {
18137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18138 IEM_MC_BEGIN(4, 1);
18139 IEM_MC_ARG(uint64_t *, pu64AX, 0);
18140 IEM_MC_ARG(uint64_t *, pu64DX, 1);
18141 IEM_MC_ARG(uint64_t, u64Value, 2);
18142 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18143 IEM_MC_LOCAL(int32_t, rc);
18144
18145 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18146 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
18147 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
18148 IEM_MC_REF_EFLAGS(pEFlags);
18149 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
18150 IEM_MC_IF_LOCAL_IS_Z(rc) {
18151 IEM_MC_ADVANCE_RIP();
18152 } IEM_MC_ELSE() {
18153 IEM_MC_RAISE_DIVIDE_ERROR();
18154 } IEM_MC_ENDIF();
18155
18156 IEM_MC_END();
18157 return VINF_SUCCESS;
18158 }
18159
18160 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18161 }
18162 }
18163 else
18164 {
18165 /* memory access. */
18166 switch (pVCpu->iem.s.enmEffOpSize)
18167 {
18168 case IEMMODE_16BIT:
18169 {
18170 IEM_MC_BEGIN(4, 2);
18171 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18172 IEM_MC_ARG(uint16_t *, pu16DX, 1);
18173 IEM_MC_ARG(uint16_t, u16Value, 2);
18174 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18175 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18176 IEM_MC_LOCAL(int32_t, rc);
18177
18178 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18180 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18181 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18182 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
18183 IEM_MC_REF_EFLAGS(pEFlags);
18184 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
18185 IEM_MC_IF_LOCAL_IS_Z(rc) {
18186 IEM_MC_ADVANCE_RIP();
18187 } IEM_MC_ELSE() {
18188 IEM_MC_RAISE_DIVIDE_ERROR();
18189 } IEM_MC_ENDIF();
18190
18191 IEM_MC_END();
18192 return VINF_SUCCESS;
18193 }
18194
18195 case IEMMODE_32BIT:
18196 {
18197 IEM_MC_BEGIN(4, 2);
18198 IEM_MC_ARG(uint32_t *, pu32AX, 0);
18199 IEM_MC_ARG(uint32_t *, pu32DX, 1);
18200 IEM_MC_ARG(uint32_t, u32Value, 2);
18201 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18202 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18203 IEM_MC_LOCAL(int32_t, rc);
18204
18205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18207 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18208 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
18209 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
18210 IEM_MC_REF_EFLAGS(pEFlags);
18211 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
18212 IEM_MC_IF_LOCAL_IS_Z(rc) {
18213 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
18214 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
18215 IEM_MC_ADVANCE_RIP();
18216 } IEM_MC_ELSE() {
18217 IEM_MC_RAISE_DIVIDE_ERROR();
18218 } IEM_MC_ENDIF();
18219
18220 IEM_MC_END();
18221 return VINF_SUCCESS;
18222 }
18223
18224 case IEMMODE_64BIT:
18225 {
18226 IEM_MC_BEGIN(4, 2);
18227 IEM_MC_ARG(uint64_t *, pu64AX, 0);
18228 IEM_MC_ARG(uint64_t *, pu64DX, 1);
18229 IEM_MC_ARG(uint64_t, u64Value, 2);
18230 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18231 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18232 IEM_MC_LOCAL(int32_t, rc);
18233
18234 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18236 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18237 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
18238 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
18239 IEM_MC_REF_EFLAGS(pEFlags);
18240 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
18241 IEM_MC_IF_LOCAL_IS_Z(rc) {
18242 IEM_MC_ADVANCE_RIP();
18243 } IEM_MC_ELSE() {
18244 IEM_MC_RAISE_DIVIDE_ERROR();
18245 } IEM_MC_ENDIF();
18246
18247 IEM_MC_END();
18248 return VINF_SUCCESS;
18249 }
18250
18251 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18252 }
18253 }
18254}
18255
/** Opcode 0xf6 - Group 3 byte-sized ops; dispatches on the ModR/M reg field. */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Eb, "not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Eb, "neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            /* DIV/IDIV additionally leave OF and CF undefined. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
18292
18293
/** Opcode 0xf7 - Group 3 word/dword/qword ops; dispatches on the ModR/M reg field. */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Ev, "not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Ev, "neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            /* DIV/IDIV additionally leave OF and CF undefined. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
18330
18331
/** Opcode 0xf8 - CLC: clear the carry flag. */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
18343
18344
/** Opcode 0xf9 - STC: set the carry flag. */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
18356
18357
/** Opcode 0xfa - CLI: deferred to a C implementation (IOPL/VME checks). */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
18365
18366
/** Opcode 0xfb - STI: deferred to a C implementation (IOPL/VME and interrupt shadow handling). */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
18373
18374
/** Opcode 0xfc - CLD: clear the direction flag. */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
18386
18387
/** Opcode 0xfd - STD: set the direction flag. */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
18399
18400
/** Opcode 0xfe - Group 4: INC/DEC Eb; reg fields 2-7 are invalid. */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Eb, "inc Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Eb, "dec Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
        default:
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
18418
18419
18420/**
18421 * Opcode 0xff /2.
18422 * @param bRm The RM byte.
18423 */
18424FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
18425{
18426 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
18427 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
18428
18429 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18430 {
18431 /* The new RIP is taken from a register. */
18432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18433 switch (pVCpu->iem.s.enmEffOpSize)
18434 {
18435 case IEMMODE_16BIT:
18436 IEM_MC_BEGIN(1, 0);
18437 IEM_MC_ARG(uint16_t, u16Target, 0);
18438 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18439 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
18440 IEM_MC_END()
18441 return VINF_SUCCESS;
18442
18443 case IEMMODE_32BIT:
18444 IEM_MC_BEGIN(1, 0);
18445 IEM_MC_ARG(uint32_t, u32Target, 0);
18446 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18447 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
18448 IEM_MC_END()
18449 return VINF_SUCCESS;
18450
18451 case IEMMODE_64BIT:
18452 IEM_MC_BEGIN(1, 0);
18453 IEM_MC_ARG(uint64_t, u64Target, 0);
18454 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18455 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
18456 IEM_MC_END()
18457 return VINF_SUCCESS;
18458
18459 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18460 }
18461 }
18462 else
18463 {
18464 /* The new RIP is taken from a register. */
18465 switch (pVCpu->iem.s.enmEffOpSize)
18466 {
18467 case IEMMODE_16BIT:
18468 IEM_MC_BEGIN(1, 1);
18469 IEM_MC_ARG(uint16_t, u16Target, 0);
18470 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18471 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18473 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18474 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
18475 IEM_MC_END()
18476 return VINF_SUCCESS;
18477
18478 case IEMMODE_32BIT:
18479 IEM_MC_BEGIN(1, 1);
18480 IEM_MC_ARG(uint32_t, u32Target, 0);
18481 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18482 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18484 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18485 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
18486 IEM_MC_END()
18487 return VINF_SUCCESS;
18488
18489 case IEMMODE_64BIT:
18490 IEM_MC_BEGIN(1, 1);
18491 IEM_MC_ARG(uint64_t, u64Target, 0);
18492 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18493 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18495 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18496 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
18497 IEM_MC_END()
18498 return VINF_SUCCESS;
18499
18500 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18501 }
18502 }
18503}
18504
18505typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
18506
18507FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
18508{
18509 /* Registers? How?? */
18510 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
18511 { /* likely */ }
18512 else
18513 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
18514
18515 /* Far pointer loaded from memory. */
18516 switch (pVCpu->iem.s.enmEffOpSize)
18517 {
18518 case IEMMODE_16BIT:
18519 IEM_MC_BEGIN(3, 1);
18520 IEM_MC_ARG(uint16_t, u16Sel, 0);
18521 IEM_MC_ARG(uint16_t, offSeg, 1);
18522 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
18523 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18524 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18526 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18527 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
18528 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18529 IEM_MC_END();
18530 return VINF_SUCCESS;
18531
18532 case IEMMODE_64BIT:
18533 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
18534 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
18535 * and call far qword [rsp] encodings. */
18536 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
18537 {
18538 IEM_MC_BEGIN(3, 1);
18539 IEM_MC_ARG(uint16_t, u16Sel, 0);
18540 IEM_MC_ARG(uint64_t, offSeg, 1);
18541 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
18542 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18543 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18545 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18546 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
18547 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18548 IEM_MC_END();
18549 return VINF_SUCCESS;
18550 }
18551 /* AMD falls thru. */
18552
18553 case IEMMODE_32BIT:
18554 IEM_MC_BEGIN(3, 1);
18555 IEM_MC_ARG(uint16_t, u16Sel, 0);
18556 IEM_MC_ARG(uint32_t, offSeg, 1);
18557 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
18558 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18559 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18561 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18562 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
18563 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18564 IEM_MC_END();
18565 return VINF_SUCCESS;
18566
18567 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18568 }
18569}
18570
18571
18572/**
18573 * Opcode 0xff /3.
18574 * @param bRm The RM byte.
18575 */
18576FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
18577{
18578 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
18579 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
18580}
18581
18582
18583/**
18584 * Opcode 0xff /4.
18585 * @param bRm The RM byte.
18586 */
18587FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
18588{
18589 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
18590 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
18591
18592 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18593 {
18594 /* The new RIP is taken from a register. */
18595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18596 switch (pVCpu->iem.s.enmEffOpSize)
18597 {
18598 case IEMMODE_16BIT:
18599 IEM_MC_BEGIN(0, 1);
18600 IEM_MC_LOCAL(uint16_t, u16Target);
18601 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18602 IEM_MC_SET_RIP_U16(u16Target);
18603 IEM_MC_END()
18604 return VINF_SUCCESS;
18605
18606 case IEMMODE_32BIT:
18607 IEM_MC_BEGIN(0, 1);
18608 IEM_MC_LOCAL(uint32_t, u32Target);
18609 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18610 IEM_MC_SET_RIP_U32(u32Target);
18611 IEM_MC_END()
18612 return VINF_SUCCESS;
18613
18614 case IEMMODE_64BIT:
18615 IEM_MC_BEGIN(0, 1);
18616 IEM_MC_LOCAL(uint64_t, u64Target);
18617 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18618 IEM_MC_SET_RIP_U64(u64Target);
18619 IEM_MC_END()
18620 return VINF_SUCCESS;
18621
18622 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18623 }
18624 }
18625 else
18626 {
18627 /* The new RIP is taken from a memory location. */
18628 switch (pVCpu->iem.s.enmEffOpSize)
18629 {
18630 case IEMMODE_16BIT:
18631 IEM_MC_BEGIN(0, 2);
18632 IEM_MC_LOCAL(uint16_t, u16Target);
18633 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18634 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18636 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18637 IEM_MC_SET_RIP_U16(u16Target);
18638 IEM_MC_END()
18639 return VINF_SUCCESS;
18640
18641 case IEMMODE_32BIT:
18642 IEM_MC_BEGIN(0, 2);
18643 IEM_MC_LOCAL(uint32_t, u32Target);
18644 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18647 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18648 IEM_MC_SET_RIP_U32(u32Target);
18649 IEM_MC_END()
18650 return VINF_SUCCESS;
18651
18652 case IEMMODE_64BIT:
18653 IEM_MC_BEGIN(0, 2);
18654 IEM_MC_LOCAL(uint64_t, u64Target);
18655 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18656 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18658 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18659 IEM_MC_SET_RIP_U64(u64Target);
18660 IEM_MC_END()
18661 return VINF_SUCCESS;
18662
18663 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18664 }
18665 }
18666}
18667
18668
18669/**
18670 * Opcode 0xff /5.
18671 * @param bRm The RM byte.
18672 */
18673FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
18674{
18675 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
18676 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
18677}
18678
18679
18680/**
18681 * Opcode 0xff /6.
18682 * @param bRm The RM byte.
18683 */
18684FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
18685{
18686 IEMOP_MNEMONIC(push_Ev, "push Ev");
18687
18688 /* Registers are handled by a common worker. */
18689 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18690 return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18691
18692 /* Memory we do here. */
18693 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
18694 switch (pVCpu->iem.s.enmEffOpSize)
18695 {
18696 case IEMMODE_16BIT:
18697 IEM_MC_BEGIN(0, 2);
18698 IEM_MC_LOCAL(uint16_t, u16Src);
18699 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18700 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18702 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18703 IEM_MC_PUSH_U16(u16Src);
18704 IEM_MC_ADVANCE_RIP();
18705 IEM_MC_END();
18706 return VINF_SUCCESS;
18707
18708 case IEMMODE_32BIT:
18709 IEM_MC_BEGIN(0, 2);
18710 IEM_MC_LOCAL(uint32_t, u32Src);
18711 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18714 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18715 IEM_MC_PUSH_U32(u32Src);
18716 IEM_MC_ADVANCE_RIP();
18717 IEM_MC_END();
18718 return VINF_SUCCESS;
18719
18720 case IEMMODE_64BIT:
18721 IEM_MC_BEGIN(0, 2);
18722 IEM_MC_LOCAL(uint64_t, u64Src);
18723 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18724 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18726 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18727 IEM_MC_PUSH_U64(u64Src);
18728 IEM_MC_ADVANCE_RIP();
18729 IEM_MC_END();
18730 return VINF_SUCCESS;
18731
18732 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18733 }
18734}
18735
18736
/** Opcode 0xff - Group 5: INC/DEC/CALL/CALLF/JMP/JMPF/PUSH Ev; /7 is invalid. */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Ev, "inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Ev, "dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    AssertFailedReturn(VERR_IEM_IPE_3);
}
18765
18766
18767
/** The one-byte opcode decoder table (0x00..0xff), indexed by the opcode. */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
18835
18836
18837/** @} */
18838
18839#ifdef _MSC_VER
18840# pragma warning(pop)
18841#endif
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette