VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h @ 62411

Last change on this file since 62411 was 62171, checked in by vboxsync, 8 years ago:

IEM: Working on instruction fetching optimizations (incomplete and disabled).
/* $Id: IEMAllInstructions.cpp.h 62171 2016-07-11 18:30:07Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 */

/*
 * Copyright (C) 2011-2015 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */

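/* Note: The instruction bodies below are written with the IEM_MC_XXX
   "microcode" macros: IEM_MC_BEGIN(cArgs, cLocals) opens a statement block,
   IEM_MC_ARG/IEM_MC_ARG_CONST/IEM_MC_LOCAL declare the worker-call arguments
   and temporaries used inside it, and IEM_MC_END closes it again. */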


/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        if (!pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

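/* Decoding note: the (3 << X86_MODRM_MOD_SHIFT) test used throughout checks
   for ModRM.mod == 3, i.e. a register r/m operand; any other mod value means
   a memory operand and requires effective address calculation.  uRexReg and
   uRexB hold the REX.R/REX.B extension bits for the reg and r/m fields. */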

/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}

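/* Note on the LOCK prefix: the memory forms above map the destination
   read/write and dispatch to pImpl->pfnLockedUxx when IEM_OP_PRF_LOCK is set.
   CMP and TEST have no locked variants (pfnLockedU8 is NULL), so for them the
   destination is mapped read-only and the decode helper rejects any lock
   prefix up front. */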

/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *, pu8Dst, 0);
    IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1);
    IEM_MC_ARG(uint32_t *, pEFlags, 2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

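/* Note: In 64-bit mode the Iz immediate above is a 32-bit value that gets
   sign-extended to 64 bits (IEM_OPCODE_GET_NEXT_S32_SX_U64); these ALU
   encodings have no 64-bit immediate form. */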

/** Opcodes 0xf1, 0xd6. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC("Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Invalid with RM byte. */
FNIEMOPRM_DEF(iemOp_InvalidWithRM)
{
    IEMOP_MNEMONIC("InvalidWithRM");
    return IEMOP_RAISE_INVALID_OPCODE();
}



/** @name ..... opcodes.
 *
 * @{
 */

/** @} */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC("sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC("str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC("lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC("ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for opcodes 0x0f 0x00 /4 (verr) and /5 (verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC("verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC("verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}

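/* Note: This is the dispatch pattern for the ModRM.reg-encoded opcode groups:
   the group opcode fetches the ModRM byte once and the reg field (bits 5:3)
   selects the handler, via the table above for group 6 and via a switch for
   group 7 below; unassigned encodings raise an invalid-opcode exception. */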

/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC("monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC("mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC("xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC("xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}

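/* Note on the ORs in smsw above: in the 16-bit forms the undefined upper MSW
   bits read as set on old CPUs, so 0xffe0 is ORed in when targeting a 386 and
   0xfff0 when targeting a 286; on later CPUs (the likely path) CR0 is stored
   unmodified. */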

/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 4 bits are used. */
    IEMOP_MNEMONIC("lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC("invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC("swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pVCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

/** Common worker for opcodes 0x0f 0x02 (lar) and 0x0f 0x03 (lsl). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC("lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC("lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC("syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC("clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC("sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC("wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC("ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC("prefetch"); break;
        case 1: IEMOP_MNEMONIC("prefetchw"); break;
        case 3: IEMOP_MNEMONIC("prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC("3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}

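/* Note: All 3DNow! instructions share the 0x0f 0x0f opcode; the actual
   operation is selected by an additional opcode byte, which the dispatcher
   above switches on.  Unrecognized operation bytes raise an invalid-opcode
   exception. */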

/** Opcode 0x0f 0x10. */
FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);


/** Opcode 0x0f 0x11. */
FNIEMOP_DEF(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd)
{
    /* Quick hack. Need to restructure all of this later some time. */
    uint32_t const fRelevantPrefix = pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ);
    if (fRelevantPrefix == 0)
    {
        IEMOP_MNEMONIC("movups Wps,Vps");
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /*
             * Register, register.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_BEGIN(0, 0);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
            IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                                  ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /*
             * Memory, register.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else if (fRelevantPrefix == IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("movsd Wsd,Vsd");
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /*
             * Register, register.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, uSrc);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
            IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /*
             * Memory, register.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        IEMOP_BITCH_ABOUT_STUB();
        return VERR_IEM_INSTR_NOT_IMPLEMENTED;
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq); //NEXT


/** Opcode 0x0f 0x13. */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq)
{
    /* Quick hack. Need to restructure all of this later some time. */
    if (pVCpu->iem.s.fPrefixes == IEM_OP_PRF_SIZE_OP)
    {
        IEMOP_MNEMONIC("movlpd Mq,Vq");
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
#if 0
            /*
             * Register, register.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
            IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
#else
            return IEMOP_RAISE_INVALID_OPCODE();
#endif
        }
        else
        {
            /*
             * Memory, register.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        return VINF_SUCCESS;
    }

    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/** Opcode 0x0f 0x14. */
FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
/** Opcode 0x0f 0x15. */
FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
/** Opcode 0x0f 0x16. */
FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x17. */
FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq); //NEXT


/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC("prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC("prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC("prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC("prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

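/* Note: Opcodes 0x0f 0x18 through 0x0f 0x1f above are the prefetch-hint and
   reserved-NOP space; both handlers fully decode the ModRM byte and any
   effective address, but perform no memory access and simply advance RIP. */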

/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC("mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}

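/* Note: For MOV to/from control registers the ModRM mod field is ignored and
   the r/m field always names a general register, so there is no memory form.
   As the comments above and below say, the LOCK prefix can be used as an
   alternative CR8 encoding, but only on CPUs reporting the fMovCr8In32Bit
   feature; otherwise it raises #UD. */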

/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC("mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}


/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
1810 /* mod is ignored, as are operand size overrides. */
1811 IEMOP_MNEMONIC("mov Cd,Rd");
1812 IEMOP_HLP_MIN_386();
1813 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1814 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1815 else
1816 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1817
1818 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1819 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1820 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1821 {
1822 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1823 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1824 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1825 iCrReg |= 8;
1826 }
1827 switch (iCrReg)
1828 {
1829 case 0: case 2: case 3: case 4: case 8:
1830 break;
1831 default:
1832 return IEMOP_RAISE_INVALID_OPCODE();
1833 }
1834 IEMOP_HLP_DONE_DECODING();
1835
1836 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1837}
1838
1839
1840/** Opcode 0x0f 0x23. */
1841FNIEMOP_DEF(iemOp_mov_Dd_Rd)
1842{
1843 IEMOP_MNEMONIC("mov Dd,Rd");
1844 IEMOP_HLP_MIN_386();
1845 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1847 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1848 return IEMOP_RAISE_INVALID_OPCODE();
1849 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
1850 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
1851 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1852}
1853
1854
1855/** Opcode 0x0f 0x24. */
1856FNIEMOP_DEF(iemOp_mov_Rd_Td)
1857{
1858 IEMOP_MNEMONIC("mov Rd,Td");
1859 /** @todo works on 386 and 486. */
1860 /* The RM byte is not considered, see testcase. */
1861 return IEMOP_RAISE_INVALID_OPCODE();
1862}
1863
1864
1865/** Opcode 0x0f 0x26. */
1866FNIEMOP_DEF(iemOp_mov_Td_Rd)
1867{
1868 IEMOP_MNEMONIC("mov Td,Rd");
1869 /** @todo works on 386 and 486. */
1870 /* The RM byte is not considered, see testcase. */
1871 return IEMOP_RAISE_INVALID_OPCODE();
1872}
1873
1874
1875/** Opcode 0x0f 0x28. */
1876FNIEMOP_DEF(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd)
1877{
1878 IEMOP_MNEMONIC(!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movaps r,mr" : "movapd r,mr");
1879 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1880 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1881 {
1882 /*
1883 * Register, register.
1884 */
1885 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1886 IEM_MC_BEGIN(0, 0);
1887 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1888 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1889 else
1890 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1891 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1892 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1893 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1894 IEM_MC_ADVANCE_RIP();
1895 IEM_MC_END();
1896 }
1897 else
1898 {
1899 /*
1900 * Register, memory.
1901 */
1902 IEM_MC_BEGIN(0, 2);
1903 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1904 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1905
1906 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1907 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
1908 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1909 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1910 else
1911 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1912 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1913
1914 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1915 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1916
1917 IEM_MC_ADVANCE_RIP();
1918 IEM_MC_END();
1919 }
1920 return VINF_SUCCESS;
1921}
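
/**
 * Illustrative sketch of the alignment rule behind the _ALIGN_SSE fetch and
 * store operations used above: the aligned-move forms (movaps/movapd, and
 * movdqa below) require a 16-byte aligned effective address and raise
 * #GP(0) otherwise.  A hypothetical stand-alone predicate; the real
 * operations also fold in segmentation and paging checks.
 */
static bool iemExampleIsSse16Aligned(RTGCPTR GCPtrEff)
{
    return (GCPtrEff & 15) == 0; /* The low four address bits must be zero. */
}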
1922
1923
1924/** Opcode 0x0f 0x29. */
1925FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
1926{
1927 IEMOP_MNEMONIC(!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movaps mr,r" : "movapd mr,r");
1928 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1929 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1930 {
1931 /*
1932 * Register, register.
1933 */
1934 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1935 IEM_MC_BEGIN(0, 0);
1936 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1937 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1938 else
1939 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1940 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1941 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1942 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1943 IEM_MC_ADVANCE_RIP();
1944 IEM_MC_END();
1945 }
1946 else
1947 {
1948 /*
1949 * Memory, register.
1950 */
1951 IEM_MC_BEGIN(0, 2);
1952 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1953 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1954
1955 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1956 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
1957 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1958 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1959 else
1960 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1961 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1962
1963 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1964 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1965
1966 IEM_MC_ADVANCE_RIP();
1967 IEM_MC_END();
1968 }
1969 return VINF_SUCCESS;
1970}
1971
1972
1973/** Opcode 0x0f 0x2a. */
1974FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey); //NEXT
1975
1976
1977/** Opcode 0x0f 0x2b. */
1978FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
1979{
1980 IEMOP_MNEMONIC(!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movntps mr,r" : "movntpd mr,r");
1981 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1982 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1983 {
1984 /*
1985 * memory, register.
1986 */
1987 IEM_MC_BEGIN(0, 2);
1988 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1989 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1990
1991 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1992 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
1993 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1994 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1995 else
1996 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1997 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1998
1999 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2000 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2001
2002 IEM_MC_ADVANCE_RIP();
2003 IEM_MC_END();
2004 }
2005 /* The register, register encoding is invalid. */
2006 else
2007 return IEMOP_RAISE_INVALID_OPCODE();
2008 return VINF_SUCCESS;
2009}
2010
2011
2012/** Opcode 0x0f 0x2c. */
2013FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd); //NEXT
2014/** Opcode 0x0f 0x2d. */
2015FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
2016/** Opcode 0x0f 0x2e. */
2017FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd); //NEXT
2018/** Opcode 0x0f 0x2f. */
2019FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
2020
2021
2022/** Opcode 0x0f 0x30. */
2023FNIEMOP_DEF(iemOp_wrmsr)
2024{
2025 IEMOP_MNEMONIC("wrmsr");
2026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2027 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2028}
2029
2030
2031/** Opcode 0x0f 0x31. */
2032FNIEMOP_DEF(iemOp_rdtsc)
2033{
2034 IEMOP_MNEMONIC("rdtsc");
2035 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2036 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2037}
2038
2039
2040/** Opcode 0x0f 0x32. */
2041FNIEMOP_DEF(iemOp_rdmsr)
2042{
2043 IEMOP_MNEMONIC("rdmsr");
2044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2045 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2046}
2047
2048
2049/** Opcode 0x0f 0x33. */
2050FNIEMOP_STUB(iemOp_rdpmc);
2051/** Opcode 0x0f 0x34. */
2052FNIEMOP_STUB(iemOp_sysenter);
2053/** Opcode 0x0f 0x35. */
2054FNIEMOP_STUB(iemOp_sysexit);
2055/** Opcode 0x0f 0x37. */
2056FNIEMOP_STUB(iemOp_getsec);
2057/** Opcode 0x0f 0x38. */
2058FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
2059/** Opcode 0x0f 0x3a. */
2060FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
2061
2062
2063/**
2064 * Implements a conditional move.
2065 *
2066 * Wish there were an obvious way to do this that would let us share code
2067 * and reduce bloat.
2068 *
2069 * @param a_Cnd The conditional "microcode" operation.
2070 */
2071#define CMOV_X(a_Cnd) \
2072 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2073 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2074 { \
2075 switch (pVCpu->iem.s.enmEffOpSize) \
2076 { \
2077 case IEMMODE_16BIT: \
2078 IEM_MC_BEGIN(0, 1); \
2079 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2080 a_Cnd { \
2081 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2082 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2083 } IEM_MC_ENDIF(); \
2084 IEM_MC_ADVANCE_RIP(); \
2085 IEM_MC_END(); \
2086 return VINF_SUCCESS; \
2087 \
2088 case IEMMODE_32BIT: \
2089 IEM_MC_BEGIN(0, 1); \
2090 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2091 a_Cnd { \
2092 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2093 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2094 } IEM_MC_ELSE() { \
2095 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2096 } IEM_MC_ENDIF(); \
2097 IEM_MC_ADVANCE_RIP(); \
2098 IEM_MC_END(); \
2099 return VINF_SUCCESS; \
2100 \
2101 case IEMMODE_64BIT: \
2102 IEM_MC_BEGIN(0, 1); \
2103 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2104 a_Cnd { \
2105 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2106 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2107 } IEM_MC_ENDIF(); \
2108 IEM_MC_ADVANCE_RIP(); \
2109 IEM_MC_END(); \
2110 return VINF_SUCCESS; \
2111 \
2112 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2113 } \
2114 } \
2115 else \
2116 { \
2117 switch (pVCpu->iem.s.enmEffOpSize) \
2118 { \
2119 case IEMMODE_16BIT: \
2120 IEM_MC_BEGIN(0, 2); \
2121 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2122 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2123 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2124 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2125 a_Cnd { \
2126 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2127 } IEM_MC_ENDIF(); \
2128 IEM_MC_ADVANCE_RIP(); \
2129 IEM_MC_END(); \
2130 return VINF_SUCCESS; \
2131 \
2132 case IEMMODE_32BIT: \
2133 IEM_MC_BEGIN(0, 2); \
2134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2135 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2136 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2137 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2138 a_Cnd { \
2139 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2140 } IEM_MC_ELSE() { \
2141 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2142 } IEM_MC_ENDIF(); \
2143 IEM_MC_ADVANCE_RIP(); \
2144 IEM_MC_END(); \
2145 return VINF_SUCCESS; \
2146 \
2147 case IEMMODE_64BIT: \
2148 IEM_MC_BEGIN(0, 2); \
2149 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2150 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2151 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2152 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2153 a_Cnd { \
2154 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2155 } IEM_MC_ENDIF(); \
2156 IEM_MC_ADVANCE_RIP(); \
2157 IEM_MC_END(); \
2158 return VINF_SUCCESS; \
2159 \
2160 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2161 } \
2162 } do {} while (0)
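
/**
 * Illustrative sketch (hypothetical, plain C) of the operand size asymmetry
 * the CMOV_X macro above encodes in its IEM_MC_ELSE branches: with a 32-bit
 * operand size the upper half of the 64-bit destination is zeroed even when
 * the condition is false, whereas a 16-bit CMOV leaves the destination
 * untouched on a false condition.
 */
static uint64_t iemExampleCmov32(uint64_t uDst64, uint32_t u32Src, bool fCondition)
{
    if (fCondition)
        return u32Src;          /* Moved value, implicitly zero extended to 64 bits. */
    return (uint32_t)uDst64;    /* Condition false: bits 63:32 are still cleared. */
}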
2163
2164
2165
2166/** Opcode 0x0f 0x40. */
2167FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2168{
2169 IEMOP_MNEMONIC("cmovo Gv,Ev");
2170 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2171}
2172
2173
2174/** Opcode 0x0f 0x41. */
2175FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2176{
2177 IEMOP_MNEMONIC("cmovno Gv,Ev");
2178 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2179}
2180
2181
2182/** Opcode 0x0f 0x42. */
2183FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2184{
2185 IEMOP_MNEMONIC("cmovc Gv,Ev");
2186 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2187}
2188
2189
2190/** Opcode 0x0f 0x43. */
2191FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2192{
2193 IEMOP_MNEMONIC("cmovnc Gv,Ev");
2194 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2195}
2196
2197
2198/** Opcode 0x0f 0x44. */
2199FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2200{
2201 IEMOP_MNEMONIC("cmove Gv,Ev");
2202 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2203}
2204
2205
2206/** Opcode 0x0f 0x45. */
2207FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2208{
2209 IEMOP_MNEMONIC("cmovne Gv,Ev");
2210 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2211}
2212
2213
2214/** Opcode 0x0f 0x46. */
2215FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2216{
2217 IEMOP_MNEMONIC("cmovbe Gv,Ev");
2218 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2219}
2220
2221
2222/** Opcode 0x0f 0x47. */
2223FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2224{
2225 IEMOP_MNEMONIC("cmovnbe Gv,Ev");
2226 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2227}
2228
2229
2230/** Opcode 0x0f 0x48. */
2231FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2232{
2233 IEMOP_MNEMONIC("cmovs Gv,Ev");
2234 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2235}
2236
2237
2238/** Opcode 0x0f 0x49. */
2239FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2240{
2241 IEMOP_MNEMONIC("cmovns Gv,Ev");
2242 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2243}
2244
2245
2246/** Opcode 0x0f 0x4a. */
2247FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2248{
2249 IEMOP_MNEMONIC("cmovp Gv,Ev");
2250 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2251}
2252
2253
2254/** Opcode 0x0f 0x4b. */
2255FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2256{
2257 IEMOP_MNEMONIC("cmovnp Gv,Ev");
2258 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2259}
2260
2261
2262/** Opcode 0x0f 0x4c. */
2263FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2264{
2265 IEMOP_MNEMONIC("cmovl Gv,Ev");
2266 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2267}
2268
2269
2270/** Opcode 0x0f 0x4d. */
2271FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2272{
2273 IEMOP_MNEMONIC("cmovnl Gv,Ev");
2274 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2275}
2276
2277
2278/** Opcode 0x0f 0x4e. */
2279FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2280{
2281 IEMOP_MNEMONIC("cmovle Gv,Ev");
2282 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2283}
2284
2285
2286/** Opcode 0x0f 0x4f. */
2287FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2288{
2289 IEMOP_MNEMONIC("cmovnle Gv,Ev");
2290 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2291}
2292
2293#undef CMOV_X
2294
2295/** Opcode 0x0f 0x50. */
2296FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
2297/** Opcode 0x0f 0x51. */
2298FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
2299/** Opcode 0x0f 0x52. */
2300FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
2301/** Opcode 0x0f 0x53. */
2302FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
2303/** Opcode 0x0f 0x54. */
2304FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
2305/** Opcode 0x0f 0x55. */
2306FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
2307/** Opcode 0x0f 0x56. */
2308FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
2309/** Opcode 0x0f 0x57. */
2310FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
2311/** Opcode 0x0f 0x58. */
2312FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd); //NEXT
2313/** Opcode 0x0f 0x59. */
2314FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);//NEXT
2315/** Opcode 0x0f 0x5a. */
2316FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
2317/** Opcode 0x0f 0x5b. */
2318FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
2319/** Opcode 0x0f 0x5c. */
2320FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
2321/** Opcode 0x0f 0x5d. */
2322FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
2323/** Opcode 0x0f 0x5e. */
2324FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
2325/** Opcode 0x0f 0x5f. */
2326FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
2327
2328
2329/**
2330 * Common worker for SSE2 and MMX instructions on the forms:
2331 * pxxxx xmm1, xmm2/mem128
2332 * pxxxx mm1, mm2/mem32
2333 *
2334 * The 2nd operand is the first half of a register, which in the memory case
2335 * means a 32-bit memory access for MMX, and a 128-bit aligned 64-bit or
2336 * 128-bit memory access for SSE.
2337 *
2338 * Exceptions type 4.
2339 */
2340FNIEMOP_DEF_1(iemOpCommonMmxSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2341{
2342 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2343 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2344 {
2345 case IEM_OP_PRF_SIZE_OP: /* SSE */
2346 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2347 {
2348 /*
2349 * Register, register.
2350 */
2351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2352 IEM_MC_BEGIN(2, 0);
2353 IEM_MC_ARG(uint128_t *, pDst, 0);
2354 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2355 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2356 IEM_MC_PREPARE_SSE_USAGE();
2357 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2358 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2359 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2360 IEM_MC_ADVANCE_RIP();
2361 IEM_MC_END();
2362 }
2363 else
2364 {
2365 /*
2366 * Register, memory.
2367 */
2368 IEM_MC_BEGIN(2, 2);
2369 IEM_MC_ARG(uint128_t *, pDst, 0);
2370 IEM_MC_LOCAL(uint64_t, uSrc);
2371 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2372 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2373
2374 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2375 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2376 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2377 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2378
2379 IEM_MC_PREPARE_SSE_USAGE();
2380 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2381 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2382
2383 IEM_MC_ADVANCE_RIP();
2384 IEM_MC_END();
2385 }
2386 return VINF_SUCCESS;
2387
2388 case 0: /* MMX */
2389 if (!pImpl->pfnU64)
2390 return IEMOP_RAISE_INVALID_OPCODE();
2391 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2392 {
2393 /*
2394 * Register, register.
2395 */
2396 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2397 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2399 IEM_MC_BEGIN(2, 0);
2400 IEM_MC_ARG(uint64_t *, pDst, 0);
2401 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2402 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2403 IEM_MC_PREPARE_FPU_USAGE();
2404 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2405 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2406 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2407 IEM_MC_ADVANCE_RIP();
2408 IEM_MC_END();
2409 }
2410 else
2411 {
2412 /*
2413 * Register, memory.
2414 */
2415 IEM_MC_BEGIN(2, 2);
2416 IEM_MC_ARG(uint64_t *, pDst, 0);
2417 IEM_MC_LOCAL(uint32_t, uSrc);
2418 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2419 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2420
2421 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2423 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2424 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2425
2426 IEM_MC_PREPARE_FPU_USAGE();
2427 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2428 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2429
2430 IEM_MC_ADVANCE_RIP();
2431 IEM_MC_END();
2432 }
2433 return VINF_SUCCESS;
2434
2435 default:
2436 return IEMOP_RAISE_INVALID_OPCODE();
2437 }
2438}
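
/**
 * Illustrative sketch of what the low-low workers compute, using punpcklbw
 * on the 64-bit MMX form as the example (hypothetical plain C; the real
 * g_iemAImpl_punpcklbw worker is in assembly).  The low four bytes of the
 * two operands are interleaved, destination bytes taking the even result
 * positions and source bytes the odd ones.
 */
static uint64_t iemExamplePunpcklbw(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        uResult |= ((uDst >> (i * 8)) & 0xff) << (i * 16);     /* dst byte i -> result byte 2*i */
        uResult |= ((uSrc >> (i * 8)) & 0xff) << (i * 16 + 8); /* src byte i -> result byte 2*i + 1 */
    }
    return uResult;
}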
2439
2440
2441/** Opcode 0x0f 0x60. */
2442FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq)
2443{
2444 IEMOP_MNEMONIC("punpcklbw");
2445 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2446}
2447
2448
2449/** Opcode 0x0f 0x61. */
2450FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq)
2451{
2452 IEMOP_MNEMONIC("punpcklwd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
2453 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2454}
2455
2456
2457/** Opcode 0x0f 0x62. */
2458FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq)
2459{
2460 IEMOP_MNEMONIC("punpckldq");
2461 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2462}
2463
2464
2465/** Opcode 0x0f 0x63. */
2466FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
2467/** Opcode 0x0f 0x64. */
2468FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
2469/** Opcode 0x0f 0x65. */
2470FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
2471/** Opcode 0x0f 0x66. */
2472FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
2473/** Opcode 0x0f 0x67. */
2474FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
2475
2476
2477/**
2478 * Common worker for SSE2 and MMX instructions on the forms:
2479 * pxxxx xmm1, xmm2/mem128
2480 * pxxxx mm1, mm2/mem64
2481 *
2482 * The 2nd operand is the second half of a register, which in the memory case
2483 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2484 * where it may read the full 128 bits or only the upper 64 bits.
2485 *
2486 * Exceptions type 4.
2487 */
2488FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2489{
2490 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2491 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2492 {
2493 case IEM_OP_PRF_SIZE_OP: /* SSE */
2494 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2495 {
2496 /*
2497 * Register, register.
2498 */
2499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2500 IEM_MC_BEGIN(2, 0);
2501 IEM_MC_ARG(uint128_t *, pDst, 0);
2502 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2503 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2504 IEM_MC_PREPARE_SSE_USAGE();
2505 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2506 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2507 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2508 IEM_MC_ADVANCE_RIP();
2509 IEM_MC_END();
2510 }
2511 else
2512 {
2513 /*
2514 * Register, memory.
2515 */
2516 IEM_MC_BEGIN(2, 2);
2517 IEM_MC_ARG(uint128_t *, pDst, 0);
2518 IEM_MC_LOCAL(uint128_t, uSrc);
2519 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2520 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2521
2522 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2524 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2525 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2526
2527 IEM_MC_PREPARE_SSE_USAGE();
2528 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2529 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2530
2531 IEM_MC_ADVANCE_RIP();
2532 IEM_MC_END();
2533 }
2534 return VINF_SUCCESS;
2535
2536 case 0: /* MMX */
2537 if (!pImpl->pfnU64)
2538 return IEMOP_RAISE_INVALID_OPCODE();
2539 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2540 {
2541 /*
2542 * Register, register.
2543 */
2544 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2545 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2547 IEM_MC_BEGIN(2, 0);
2548 IEM_MC_ARG(uint64_t *, pDst, 0);
2549 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2550 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2551 IEM_MC_PREPARE_FPU_USAGE();
2552 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2553 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2554 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2555 IEM_MC_ADVANCE_RIP();
2556 IEM_MC_END();
2557 }
2558 else
2559 {
2560 /*
2561 * Register, memory.
2562 */
2563 IEM_MC_BEGIN(2, 2);
2564 IEM_MC_ARG(uint64_t *, pDst, 0);
2565 IEM_MC_LOCAL(uint64_t, uSrc);
2566 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2567 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2568
2569 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2571 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2572 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2573
2574 IEM_MC_PREPARE_FPU_USAGE();
2575 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2576 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2577
2578 IEM_MC_ADVANCE_RIP();
2579 IEM_MC_END();
2580 }
2581 return VINF_SUCCESS;
2582
2583 default:
2584 return IEMOP_RAISE_INVALID_OPCODE();
2585 }
2586}
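
/**
 * Companion sketch for the high-high worker above: punpckhbw on the 64-bit
 * MMX form (hypothetical plain C).  Same interleave as punpcklbw, but fed
 * from the upper four bytes of each operand, which is why the SSE memory
 * form conceptually only needs the upper half of the 128 bits it fetches.
 */
static uint64_t iemExamplePunpckhbw(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        uResult |= ((uDst >> (32 + i * 8)) & 0xff) << (i * 16);
        uResult |= ((uSrc >> (32 + i * 8)) & 0xff) << (i * 16 + 8);
    }
    return uResult;
}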
2587
2588
2589/** Opcode 0x0f 0x68. */
2590FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
2591{
2592 IEMOP_MNEMONIC("punpckhbw");
2593 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2594}
2595
2596
2597/** Opcode 0x0f 0x69. */
2598FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
2599{
2600 IEMOP_MNEMONIC("punpckhwd");
2601 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2602}
2603
2604
2605/** Opcode 0x0f 0x6a. */
2606FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
2607{
2608 IEMOP_MNEMONIC("punpckhdq");
2609 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2610}
2611
2612/** Opcode 0x0f 0x6b. */
2613FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);
2614
2615
2616/** Opcode 0x0f 0x6c. */
2617FNIEMOP_DEF(iemOp_punpcklqdq_Vdq_Wdq)
2618{
2619 IEMOP_MNEMONIC("punpcklqdq");
2620 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2621}
2622
2623
2624/** Opcode 0x0f 0x6d. */
2625FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
2626{
2627 IEMOP_MNEMONIC("punpckhqdq");
2628 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
2629}
2630
2631
2632/** Opcode 0x0f 0x6e. */
2633FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
2634{
2635 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2636 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2637 {
2638 case IEM_OP_PRF_SIZE_OP: /* SSE */
2639 IEMOP_MNEMONIC("movd/q Wd/q,Ed/q");
2640 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2641 {
2642 /* XMM, greg*/
2643 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2644 IEM_MC_BEGIN(0, 1);
2645 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2646 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2647 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2648 {
2649 IEM_MC_LOCAL(uint64_t, u64Tmp);
2650 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2651 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2652 }
2653 else
2654 {
2655 IEM_MC_LOCAL(uint32_t, u32Tmp);
2656 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2657 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2658 }
2659 IEM_MC_ADVANCE_RIP();
2660 IEM_MC_END();
2661 }
2662 else
2663 {
2664 /* XMM, [mem] */
2665 IEM_MC_BEGIN(0, 2);
2666 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2667 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
2668 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* cbImm=0: no immediate byte follows */
2669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2670 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2671 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2672 {
2673 IEM_MC_LOCAL(uint64_t, u64Tmp);
2674 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2675 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2676 }
2677 else
2678 {
2679 IEM_MC_LOCAL(uint32_t, u32Tmp);
2680 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2681 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2682 }
2683 IEM_MC_ADVANCE_RIP();
2684 IEM_MC_END();
2685 }
2686 return VINF_SUCCESS;
2687
2688 case 0: /* MMX */
2689 IEMOP_MNEMONIC("movq/d Pd/q,Ed/q");
2690 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2691 {
2692 /* MMX, greg */
2693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2694 IEM_MC_BEGIN(0, 1);
2695 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2696 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2697 IEM_MC_LOCAL(uint64_t, u64Tmp);
2698 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2699 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2700 else
2701 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2702 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2703 IEM_MC_ADVANCE_RIP();
2704 IEM_MC_END();
2705 }
2706 else
2707 {
2708 /* MMX, [mem] */
2709 IEM_MC_BEGIN(0, 2);
2710 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2711 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* cbImm=0: no immediate byte follows */
2713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2714 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2715 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2716 {
2717 IEM_MC_LOCAL(uint64_t, u64Tmp);
2718 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2719 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2720 }
2721 else
2722 {
2723 IEM_MC_LOCAL(uint32_t, u32Tmp);
2724 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2725 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
2726 }
2727 IEM_MC_ADVANCE_RIP();
2728 IEM_MC_END();
2729 }
2730 return VINF_SUCCESS;
2731
2732 default:
2733 return IEMOP_RAISE_INVALID_OPCODE();
2734 }
2735}
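
/**
 * Illustrative sketch (hypothetical helper) of the REX.W distinction in the
 * movd/movq decoder above: without REX.W a 32-bit value is fetched and zero
 * extended into the low 64 bits of the target, with REX.W the full 64 bits
 * move; the XMM forms additionally clear bits 127:64 (the _ZX_U128 stores).
 */
static uint64_t iemExampleMovdMovqLow64(uint64_t uGReg, bool fRexW)
{
    return fRexW ? uGReg : (uint32_t)uGReg; /* The truncating cast doubles as zero extension. */
}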
2736
2737
2738/** Opcode 0x0f 0x6f. */
2739FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
2740{
2741 bool fAligned = false;
2742 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2743 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2744 {
2745 case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
2746 fAligned = true; /* fall thru */
2747 case IEM_OP_PRF_REPZ: /* SSE unaligned */
2748 if (fAligned)
2749 IEMOP_MNEMONIC("movdqa Vdq,Wdq");
2750 else
2751 IEMOP_MNEMONIC("movdqu Vdq,Wdq");
2752 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2753 {
2754 /*
2755 * Register, register.
2756 */
2757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2758 IEM_MC_BEGIN(0, 0);
2759 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2760 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2761 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2762 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2763 IEM_MC_ADVANCE_RIP();
2764 IEM_MC_END();
2765 }
2766 else
2767 {
2768 /*
2769 * Register, memory.
2770 */
2771 IEM_MC_BEGIN(0, 2);
2772 IEM_MC_LOCAL(uint128_t, u128Tmp);
2773 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2774
2775 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2777 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2778 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2779 if (fAligned)
2780 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2781 else
2782 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2783 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2784
2785 IEM_MC_ADVANCE_RIP();
2786 IEM_MC_END();
2787 }
2788 return VINF_SUCCESS;
2789
2790 case 0: /* MMX */
2791 IEMOP_MNEMONIC("movq Pq,Qq");
2792 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2793 {
2794 /*
2795 * Register, register.
2796 */
2797 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2798 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2800 IEM_MC_BEGIN(0, 1);
2801 IEM_MC_LOCAL(uint64_t, u64Tmp);
2802 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2803 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2804 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
2805 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2806 IEM_MC_ADVANCE_RIP();
2807 IEM_MC_END();
2808 }
2809 else
2810 {
2811 /*
2812 * Register, memory.
2813 */
2814 IEM_MC_BEGIN(0, 2);
2815 IEM_MC_LOCAL(uint64_t, u64Tmp);
2816 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2817
2818 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2820 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2821 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2822 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2823 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2824
2825 IEM_MC_ADVANCE_RIP();
2826 IEM_MC_END();
2827 }
2828 return VINF_SUCCESS;
2829
2830 default:
2831 return IEMOP_RAISE_INVALID_OPCODE();
2832 }
2833}
2834
2835
2836/** Opcode 0x0f 0x70. The immediate here is evil! */
2837FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
2838{
2839 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2840 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2841 {
2842 case IEM_OP_PRF_SIZE_OP: /* SSE */
2843 case IEM_OP_PRF_REPNZ: /* SSE */
2844 case IEM_OP_PRF_REPZ: /* SSE */
2845 {
2846 PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
2847 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2848 {
2849 case IEM_OP_PRF_SIZE_OP:
2850 IEMOP_MNEMONIC("pshufd Vdq,Wdq,Ib");
2851 pfnAImpl = iemAImpl_pshufd;
2852 break;
2853 case IEM_OP_PRF_REPNZ:
2854 IEMOP_MNEMONIC("pshuflw Vdq,Wdq,Ib");
2855 pfnAImpl = iemAImpl_pshuflw;
2856 break;
2857 case IEM_OP_PRF_REPZ:
2858 IEMOP_MNEMONIC("pshufhw Vdq,Wdq,Ib");
2859 pfnAImpl = iemAImpl_pshufhw;
2860 break;
2861 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2862 }
2863 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2864 {
2865 /*
2866 * Register, register.
2867 */
2868 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2870
2871 IEM_MC_BEGIN(3, 0);
2872 IEM_MC_ARG(uint128_t *, pDst, 0);
2873 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2874 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2875 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2876 IEM_MC_PREPARE_SSE_USAGE();
2877 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2878 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2879 IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
2880 IEM_MC_ADVANCE_RIP();
2881 IEM_MC_END();
2882 }
2883 else
2884 {
2885 /*
2886 * Register, memory.
2887 */
2888 IEM_MC_BEGIN(3, 2);
2889 IEM_MC_ARG(uint128_t *, pDst, 0);
2890 IEM_MC_LOCAL(uint128_t, uSrc);
2891 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2892 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2893
2894 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* cbImm=1: the Ib follows the ModR/M bytes */
2895 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2896 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2898 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2899
2900 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2901 IEM_MC_PREPARE_SSE_USAGE();
2902 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2903 IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
2904
2905 IEM_MC_ADVANCE_RIP();
2906 IEM_MC_END();
2907 }
2908 return VINF_SUCCESS;
2909 }
2910
2911 case 0: /* MMX Extension */
2912 IEMOP_MNEMONIC("pshufw Pq,Qq,Ib");
2913 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2914 {
2915 /*
2916 * Register, register.
2917 */
2918 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2920
2921 IEM_MC_BEGIN(3, 0);
2922 IEM_MC_ARG(uint64_t *, pDst, 0);
2923 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2924 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2925 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2926 IEM_MC_PREPARE_FPU_USAGE();
2927 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2928 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2929 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2930 IEM_MC_ADVANCE_RIP();
2931 IEM_MC_END();
2932 }
2933 else
2934 {
2935 /*
2936 * Register, memory.
2937 */
2938 IEM_MC_BEGIN(3, 2);
2939 IEM_MC_ARG(uint64_t *, pDst, 0);
2940 IEM_MC_LOCAL(uint64_t, uSrc);
2941 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2942 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2943
2944 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* cbImm=1: the Ib follows the ModR/M bytes */
2945 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2946 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2948 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2949
2950 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2951 IEM_MC_PREPARE_FPU_USAGE();
2952 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2953 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2954
2955 IEM_MC_ADVANCE_RIP();
2956 IEM_MC_END();
2957 }
2958 return VINF_SUCCESS;
2959
2960 default:
2961 return IEMOP_RAISE_INVALID_OPCODE();
2962 }
2963}
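
/**
 * Illustrative sketch of the "evil" immediate consumed above: each 2-bit
 * field of it selects which source word lands in the corresponding
 * destination word.  Hypothetical plain C version of the 64-bit pshufw;
 * the real iemAImpl_pshufw worker is in assembly.
 */
static uint64_t iemExamplePshufw(uint64_t uSrc, uint8_t bEvil)
{
    uint64_t uResult = 0;
    for (unsigned iWord = 0; iWord < 4; iWord++)
    {
        unsigned iSel = (bEvil >> (iWord * 2)) & 3;       /* Source word index for this slot. */
        uResult |= ((uSrc >> (iSel * 16)) & 0xffff) << (iWord * 16);
    }
    return uResult;
}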
2964
2965
2966/** Opcode 0x0f 0x71 11/2. */
2967FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
2968
2969/** Opcode 0x66 0x0f 0x71 11/2. */
2970FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);
2971
2972/** Opcode 0x0f 0x71 11/4. */
2973FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
2974
2975/** Opcode 0x66 0x0f 0x71 11/4. */
2976FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);
2977
2978/** Opcode 0x0f 0x71 11/6. */
2979FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
2980
2981/** Opcode 0x66 0x0f 0x71 11/6. */
2982FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
2983
2984
2985/** Opcode 0x0f 0x71. */
2986FNIEMOP_DEF(iemOp_Grp12)
2987{
2988 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2989 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2990 return IEMOP_RAISE_INVALID_OPCODE();
2991 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2992 {
2993 case 0: case 1: case 3: case 5: case 7:
2994 return IEMOP_RAISE_INVALID_OPCODE();
2995 case 2:
2996 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2997 {
2998 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
2999 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
3000 default: return IEMOP_RAISE_INVALID_OPCODE();
3001 }
3002 case 4:
3003 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3004 {
3005 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
3006 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
3007 default: return IEMOP_RAISE_INVALID_OPCODE();
3008 }
3009 case 6:
3010 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3011 {
3012 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
3013 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
3014 default: return IEMOP_RAISE_INVALID_OPCODE();
3015 }
3016 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3017 }
3018}
3019
3020
3021/** Opcode 0x0f 0x72 11/2. */
3022FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3023
3024/** Opcode 0x66 0x0f 0x72 11/2. */
3025FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);
3026
3027/** Opcode 0x0f 0x72 11/4. */
3028FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3029
3030/** Opcode 0x66 0x0f 0x72 11/4. */
3031FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);
3032
3033/** Opcode 0x0f 0x72 11/6. */
3034FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3035
3036/** Opcode 0x66 0x0f 0x72 11/6. */
3037FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
3038
3039
3040/** Opcode 0x0f 0x72. */
3041FNIEMOP_DEF(iemOp_Grp13)
3042{
3043 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3044 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3045 return IEMOP_RAISE_INVALID_OPCODE();
3046 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3047 {
3048 case 0: case 1: case 3: case 5: case 7:
3049 return IEMOP_RAISE_INVALID_OPCODE();
3050 case 2:
3051 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3052 {
3053 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
3054 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
3055 default: return IEMOP_RAISE_INVALID_OPCODE();
3056 }
3057 case 4:
3058 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3059 {
3060 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
3061 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
3062 default: return IEMOP_RAISE_INVALID_OPCODE();
3063 }
3064 case 6:
3065 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3066 {
3067 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
3068 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
3069 default: return IEMOP_RAISE_INVALID_OPCODE();
3070 }
3071 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3072 }
3073}
3074
3075
3076/** Opcode 0x0f 0x73 11/2. */
3077FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3078
3079/** Opcode 0x66 0x0f 0x73 11/2. */
3080FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);
3081
3082/** Opcode 0x66 0x0f 0x73 11/3. */
3083FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT
3084
3085/** Opcode 0x0f 0x73 11/6. */
3086FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3087
3088/** Opcode 0x66 0x0f 0x73 11/6. */
3089FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);
3090
3091/** Opcode 0x66 0x0f 0x73 11/7. */
3092FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
3093
3094
3095/** Opcode 0x0f 0x73. */
3096FNIEMOP_DEF(iemOp_Grp14)
3097{
3098 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3099 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3100 return IEMOP_RAISE_INVALID_OPCODE();
3101 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3102 {
3103 case 0: case 1: case 4: case 5:
3104 return IEMOP_RAISE_INVALID_OPCODE();
3105 case 2:
3106 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3107 {
3108 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
3109 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
3110 default: return IEMOP_RAISE_INVALID_OPCODE();
3111 }
3112 case 3:
3113 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3114 {
3115 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
3116 default: return IEMOP_RAISE_INVALID_OPCODE();
3117 }
3118 case 6:
3119 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3120 {
3121 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
3122 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
3123 default: return IEMOP_RAISE_INVALID_OPCODE();
3124 }
3125 case 7:
3126 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3127 {
3128 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
3129 default: return IEMOP_RAISE_INVALID_OPCODE();
3130 }
3131 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3132 }
3133}
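
/**
 * Illustrative sketch for the still-stubbed Grp14 /3 and /7 encodings
 * (psrldq/pslldq): these shift the whole 128-bit register by *bytes*, not
 * bits, saturating to zero for counts above 15.  Hypothetical plain C
 * version of the right shift on a little-endian qword pair.
 */
static void iemExamplePsrldq(uint64_t *puLo, uint64_t *puHi, uint8_t cbShift)
{
    if (cbShift > 15)
    {
        *puLo = *puHi = 0;                  /* Counts 16..255 clear the register. */
        return;
    }
    unsigned cBits = cbShift * 8;
    if (cBits == 0)
        return;                             /* Avoid shifting by the full type width (UB in C). */
    if (cBits < 64)
    {
        *puLo = (*puLo >> cBits) | (*puHi << (64 - cBits));
        *puHi >>= cBits;
    }
    else
    {
        *puLo = *puHi >> (cBits - 64);      /* cBits is 64..120 here. */
        *puHi = 0;
    }
}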
3134
3135
3136/**
3137 * Common worker for SSE2 and MMX instructions on the forms:
3138 * pxxx mm1, mm2/mem64
3139 * pxxx xmm1, xmm2/mem128
3140 *
3141 * Proper alignment of the 128-bit operand is enforced.
3142 * Exceptions type 4. SSE2 and MMX cpuid checks.
3143 */
3144FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3145{
3146 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3147 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3148 {
3149 case IEM_OP_PRF_SIZE_OP: /* SSE */
3150 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3151 {
3152 /*
3153 * Register, register.
3154 */
3155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3156 IEM_MC_BEGIN(2, 0);
3157 IEM_MC_ARG(uint128_t *, pDst, 0);
3158 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3159 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3160 IEM_MC_PREPARE_SSE_USAGE();
3161 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3162 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3163 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3164 IEM_MC_ADVANCE_RIP();
3165 IEM_MC_END();
3166 }
3167 else
3168 {
3169 /*
3170 * Register, memory.
3171 */
3172 IEM_MC_BEGIN(2, 2);
3173 IEM_MC_ARG(uint128_t *, pDst, 0);
3174 IEM_MC_LOCAL(uint128_t, uSrc);
3175 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3176 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3177
3178 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3180 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3181 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3182
3183 IEM_MC_PREPARE_SSE_USAGE();
3184 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3185 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3186
3187 IEM_MC_ADVANCE_RIP();
3188 IEM_MC_END();
3189 }
3190 return VINF_SUCCESS;
3191
3192 case 0: /* MMX */
3193 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3194 {
3195 /*
3196 * Register, register.
3197 */
3198 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3199 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3201 IEM_MC_BEGIN(2, 0);
3202 IEM_MC_ARG(uint64_t *, pDst, 0);
3203 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3204 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3205 IEM_MC_PREPARE_FPU_USAGE();
3206 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3207 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3208 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3209 IEM_MC_ADVANCE_RIP();
3210 IEM_MC_END();
3211 }
3212 else
3213 {
3214 /*
3215 * Register, memory.
3216 */
3217 IEM_MC_BEGIN(2, 2);
3218 IEM_MC_ARG(uint64_t *, pDst, 0);
3219 IEM_MC_LOCAL(uint64_t, uSrc);
3220 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3221 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3222
3223 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3225 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3226 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3227
3228 IEM_MC_PREPARE_FPU_USAGE();
3229 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3230 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3231
3232 IEM_MC_ADVANCE_RIP();
3233 IEM_MC_END();
3234 }
3235 return VINF_SUCCESS;
3236
3237 default:
3238 return IEMOP_RAISE_INVALID_OPCODE();
3239 }
3240}
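
/**
 * Illustrative sketch of the compare family the full-full worker above
 * dispatches, using pcmpeqb on the 64-bit MMX form (hypothetical plain C).
 * Each result byte becomes all ones where the operand bytes match and all
 * zeroes where they differ; no EFLAGS are produced.
 */
static uint64_t iemExamplePcmpeqb(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 8; i++)
        if (((uDst >> (i * 8)) & 0xff) == ((uSrc >> (i * 8)) & 0xff))
            uResult |= (uint64_t)0xff << (i * 8);
    return uResult;
}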
3241
3242
3243/** Opcode 0x0f 0x74. */
3244FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
3245{
3246 IEMOP_MNEMONIC("pcmpeqb");
3247 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3248}
3249
3250
3251/** Opcode 0x0f 0x75. */
3252FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
3253{
3254 IEMOP_MNEMONIC("pcmpeqw");
3255 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3256}
3257
3258
3259/** Opcode 0x0f 0x76. */
3260FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
3261{
3262 IEMOP_MNEMONIC("pcmpeqd");
3263 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3264}
3265
3266
3267/** Opcode 0x0f 0x77. */
3268FNIEMOP_STUB(iemOp_emms);
3269/** Opcode 0x0f 0x78. */
3270FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17);
3271/** Opcode 0x0f 0x79. */
3272FNIEMOP_UD_STUB(iemOp_vmwrite);
3273/** Opcode 0x0f 0x7c. */
3274FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);
3275/** Opcode 0x0f 0x7d. */
3276FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
3277
3278
3279/** Opcode 0x0f 0x7e. */
3280FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
3281{
3282 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3283 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3284 {
3285 case IEM_OP_PRF_SIZE_OP: /* SSE */
3286 IEMOP_MNEMONIC("movd/q Ed/q,Wd/q");
3287 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3288 {
3289 /* greg, XMM */
3290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3291 IEM_MC_BEGIN(0, 1);
3292 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3293 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3294 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3295 {
3296 IEM_MC_LOCAL(uint64_t, u64Tmp);
3297 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3298 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3299 }
3300 else
3301 {
3302 IEM_MC_LOCAL(uint32_t, u32Tmp);
3303 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3304 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3305 }
3306 IEM_MC_ADVANCE_RIP();
3307 IEM_MC_END();
3308 }
3309 else
3310 {
3311 /* [mem], XMM */
3312 IEM_MC_BEGIN(0, 2);
3313 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3314 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3315 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* cbImm=0: no immediate byte follows */
3316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3317 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3318 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3319 {
3320 IEM_MC_LOCAL(uint64_t, u64Tmp);
3321 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3322 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3323 }
3324 else
3325 {
3326 IEM_MC_LOCAL(uint32_t, u32Tmp);
3327 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3328 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3329 }
3330 IEM_MC_ADVANCE_RIP();
3331 IEM_MC_END();
3332 }
3333 return VINF_SUCCESS;
3334
3335 case 0: /* MMX */
3336 IEMOP_MNEMONIC("movq/d Ed/q,Pd/q");
3337 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3338 {
3339 /* greg, MMX */
3340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3341 IEM_MC_BEGIN(0, 1);
3342 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3343 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3344 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3345 {
3346 IEM_MC_LOCAL(uint64_t, u64Tmp);
3347 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3348 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3349 }
3350 else
3351 {
3352 IEM_MC_LOCAL(uint32_t, u32Tmp);
3353 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3354 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3355 }
3356 IEM_MC_ADVANCE_RIP();
3357 IEM_MC_END();
3358 }
3359 else
3360 {
3361 /* [mem], MMX */
3362 IEM_MC_BEGIN(0, 2);
3363 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3364 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3365 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* cbImm=0: no immediate byte follows */
3366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3367 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3368 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3369 {
3370 IEM_MC_LOCAL(uint64_t, u64Tmp);
3371 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3372 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3373 }
3374 else
3375 {
3376 IEM_MC_LOCAL(uint32_t, u32Tmp);
3377 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3378 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3379 }
3380 IEM_MC_ADVANCE_RIP();
3381 IEM_MC_END();
3382 }
3383 return VINF_SUCCESS;
3384
3385 default:
3386 return IEMOP_RAISE_INVALID_OPCODE();
3387 }
3388}
3389
3390
3391/** Opcode 0x0f 0x7f. */
3392FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
3393{
3394 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3395 bool fAligned = false;
3396 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3397 {
3398 case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
3399 fAligned = true; /* fall thru */
3400 case IEM_OP_PRF_REPZ: /* SSE unaligned */
3401 if (fAligned)
3402 IEMOP_MNEMONIC("movdqa Wdq,Vdq");
3403 else
3404 IEMOP_MNEMONIC("movdqu Wdq,Vdq");
3405 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3406 {
3407 /*
3408 * Register, register.
3409 */
3410 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3411 IEM_MC_BEGIN(0, 0);
3412 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3413 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3414 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3415 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3416 IEM_MC_ADVANCE_RIP();
3417 IEM_MC_END();
3418 }
3419 else
3420 {
3421 /*
3422 * Register, memory.
3423 */
3424 IEM_MC_BEGIN(0, 2);
3425 IEM_MC_LOCAL(uint128_t, u128Tmp);
3426 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3427
3428 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3430 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3431 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3432
3433 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3434 if (fAligned)
3435 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3436 else
3437 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3438
3439 IEM_MC_ADVANCE_RIP();
3440 IEM_MC_END();
3441 }
3442 return VINF_SUCCESS;
3443
3444 case 0: /* MMX */
3445 IEMOP_MNEMONIC("movq Qq,Pq");
3446
3447 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3448 {
3449 /*
3450 * Register, register.
3451 */
3452 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3453 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3455 IEM_MC_BEGIN(0, 1);
3456 IEM_MC_LOCAL(uint64_t, u64Tmp);
3457 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3458 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3459 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3460 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3461 IEM_MC_ADVANCE_RIP();
3462 IEM_MC_END();
3463 }
3464 else
3465 {
3466 /*
3467 * Register, memory.
3468 */
3469 IEM_MC_BEGIN(0, 2);
3470 IEM_MC_LOCAL(uint64_t, u64Tmp);
3471 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3472
3473 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3474 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3475 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3476 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3477
3478 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3479 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3480
3481 IEM_MC_ADVANCE_RIP();
3482 IEM_MC_END();
3483 }
3484 return VINF_SUCCESS;
3485
3486 default:
3487 return IEMOP_RAISE_INVALID_OPCODE();
3488 }
3489}
3490
3491
3492
3493/** Opcode 0x0f 0x80. */
3494FNIEMOP_DEF(iemOp_jo_Jv)
3495{
3496 IEMOP_MNEMONIC("jo Jv");
3497 IEMOP_HLP_MIN_386();
3498 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3499 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3500 {
3501 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3503
3504 IEM_MC_BEGIN(0, 0);
3505 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3506 IEM_MC_REL_JMP_S16(i16Imm);
3507 } IEM_MC_ELSE() {
3508 IEM_MC_ADVANCE_RIP();
3509 } IEM_MC_ENDIF();
3510 IEM_MC_END();
3511 }
3512 else
3513 {
3514 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3516
3517 IEM_MC_BEGIN(0, 0);
3518 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3519 IEM_MC_REL_JMP_S32(i32Imm);
3520 } IEM_MC_ELSE() {
3521 IEM_MC_ADVANCE_RIP();
3522 } IEM_MC_ENDIF();
3523 IEM_MC_END();
3524 }
3525 return VINF_SUCCESS;
3526}
3527
3528
3529/** Opcode 0x0f 0x81. */
3530FNIEMOP_DEF(iemOp_jno_Jv)
3531{
3532 IEMOP_MNEMONIC("jno Jv");
3533 IEMOP_HLP_MIN_386();
3534 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3535 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3536 {
3537 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3539
3540 IEM_MC_BEGIN(0, 0);
3541 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3542 IEM_MC_ADVANCE_RIP();
3543 } IEM_MC_ELSE() {
3544 IEM_MC_REL_JMP_S16(i16Imm);
3545 } IEM_MC_ENDIF();
3546 IEM_MC_END();
3547 }
3548 else
3549 {
3550 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3552
3553 IEM_MC_BEGIN(0, 0);
3554 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3555 IEM_MC_ADVANCE_RIP();
3556 } IEM_MC_ELSE() {
3557 IEM_MC_REL_JMP_S32(i32Imm);
3558 } IEM_MC_ENDIF();
3559 IEM_MC_END();
3560 }
3561 return VINF_SUCCESS;
3562}
3563
3564
3565/** Opcode 0x0f 0x82. */
3566FNIEMOP_DEF(iemOp_jc_Jv)
3567{
3568 IEMOP_MNEMONIC("jc/jb/jnae Jv");
3569 IEMOP_HLP_MIN_386();
3570 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3571 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3572 {
3573 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3575
3576 IEM_MC_BEGIN(0, 0);
3577 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3578 IEM_MC_REL_JMP_S16(i16Imm);
3579 } IEM_MC_ELSE() {
3580 IEM_MC_ADVANCE_RIP();
3581 } IEM_MC_ENDIF();
3582 IEM_MC_END();
3583 }
3584 else
3585 {
3586 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3588
3589 IEM_MC_BEGIN(0, 0);
3590 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3591 IEM_MC_REL_JMP_S32(i32Imm);
3592 } IEM_MC_ELSE() {
3593 IEM_MC_ADVANCE_RIP();
3594 } IEM_MC_ENDIF();
3595 IEM_MC_END();
3596 }
3597 return VINF_SUCCESS;
3598}
3599
3600
3601/** Opcode 0x0f 0x83. */
3602FNIEMOP_DEF(iemOp_jnc_Jv)
3603{
3604 IEMOP_MNEMONIC("jnc/jnb/jae Jv");
3605 IEMOP_HLP_MIN_386();
3606 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3607 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3608 {
3609 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3611
3612 IEM_MC_BEGIN(0, 0);
3613 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3614 IEM_MC_ADVANCE_RIP();
3615 } IEM_MC_ELSE() {
3616 IEM_MC_REL_JMP_S16(i16Imm);
3617 } IEM_MC_ENDIF();
3618 IEM_MC_END();
3619 }
3620 else
3621 {
3622 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3624
3625 IEM_MC_BEGIN(0, 0);
3626 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3627 IEM_MC_ADVANCE_RIP();
3628 } IEM_MC_ELSE() {
3629 IEM_MC_REL_JMP_S32(i32Imm);
3630 } IEM_MC_ENDIF();
3631 IEM_MC_END();
3632 }
3633 return VINF_SUCCESS;
3634}
3635
3636
3637/** Opcode 0x0f 0x84. */
3638FNIEMOP_DEF(iemOp_je_Jv)
3639{
3640 IEMOP_MNEMONIC("je/jz Jv");
3641 IEMOP_HLP_MIN_386();
3642 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3643 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3644 {
3645 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3647
3648 IEM_MC_BEGIN(0, 0);
3649 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3650 IEM_MC_REL_JMP_S16(i16Imm);
3651 } IEM_MC_ELSE() {
3652 IEM_MC_ADVANCE_RIP();
3653 } IEM_MC_ENDIF();
3654 IEM_MC_END();
3655 }
3656 else
3657 {
3658 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3660
3661 IEM_MC_BEGIN(0, 0);
3662 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3663 IEM_MC_REL_JMP_S32(i32Imm);
3664 } IEM_MC_ELSE() {
3665 IEM_MC_ADVANCE_RIP();
3666 } IEM_MC_ENDIF();
3667 IEM_MC_END();
3668 }
3669 return VINF_SUCCESS;
3670}
3671
3672
3673/** Opcode 0x0f 0x85. */
3674FNIEMOP_DEF(iemOp_jne_Jv)
3675{
3676 IEMOP_MNEMONIC("jne/jnz Jv");
3677 IEMOP_HLP_MIN_386();
3678 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3679 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3680 {
3681 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3683
3684 IEM_MC_BEGIN(0, 0);
3685 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3686 IEM_MC_ADVANCE_RIP();
3687 } IEM_MC_ELSE() {
3688 IEM_MC_REL_JMP_S16(i16Imm);
3689 } IEM_MC_ENDIF();
3690 IEM_MC_END();
3691 }
3692 else
3693 {
3694 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3696
3697 IEM_MC_BEGIN(0, 0);
3698 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3699 IEM_MC_ADVANCE_RIP();
3700 } IEM_MC_ELSE() {
3701 IEM_MC_REL_JMP_S32(i32Imm);
3702 } IEM_MC_ENDIF();
3703 IEM_MC_END();
3704 }
3705 return VINF_SUCCESS;
3706}
3707
3708
3709/** Opcode 0x0f 0x86. */
3710FNIEMOP_DEF(iemOp_jbe_Jv)
3711{
3712 IEMOP_MNEMONIC("jbe/jna Jv");
3713 IEMOP_HLP_MIN_386();
3714 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3715 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3716 {
3717 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3719
3720 IEM_MC_BEGIN(0, 0);
3721 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3722 IEM_MC_REL_JMP_S16(i16Imm);
3723 } IEM_MC_ELSE() {
3724 IEM_MC_ADVANCE_RIP();
3725 } IEM_MC_ENDIF();
3726 IEM_MC_END();
3727 }
3728 else
3729 {
3730 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3732
3733 IEM_MC_BEGIN(0, 0);
3734 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3735 IEM_MC_REL_JMP_S32(i32Imm);
3736 } IEM_MC_ELSE() {
3737 IEM_MC_ADVANCE_RIP();
3738 } IEM_MC_ENDIF();
3739 IEM_MC_END();
3740 }
3741 return VINF_SUCCESS;
3742}
3743
3744
3745/** Opcode 0x0f 0x87. */
3746FNIEMOP_DEF(iemOp_jnbe_Jv)
3747{
3748 IEMOP_MNEMONIC("jnbe/ja Jv");
3749 IEMOP_HLP_MIN_386();
3750 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3751 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3752 {
3753 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3755
3756 IEM_MC_BEGIN(0, 0);
3757 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3758 IEM_MC_ADVANCE_RIP();
3759 } IEM_MC_ELSE() {
3760 IEM_MC_REL_JMP_S16(i16Imm);
3761 } IEM_MC_ENDIF();
3762 IEM_MC_END();
3763 }
3764 else
3765 {
3766 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3768
3769 IEM_MC_BEGIN(0, 0);
3770 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3771 IEM_MC_ADVANCE_RIP();
3772 } IEM_MC_ELSE() {
3773 IEM_MC_REL_JMP_S32(i32Imm);
3774 } IEM_MC_ENDIF();
3775 IEM_MC_END();
3776 }
3777 return VINF_SUCCESS;
3778}
3779
3780
3781/** Opcode 0x0f 0x88. */
3782FNIEMOP_DEF(iemOp_js_Jv)
3783{
3784 IEMOP_MNEMONIC("js Jv");
3785 IEMOP_HLP_MIN_386();
3786 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3787 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3788 {
3789 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3791
3792 IEM_MC_BEGIN(0, 0);
3793 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3794 IEM_MC_REL_JMP_S16(i16Imm);
3795 } IEM_MC_ELSE() {
3796 IEM_MC_ADVANCE_RIP();
3797 } IEM_MC_ENDIF();
3798 IEM_MC_END();
3799 }
3800 else
3801 {
3802 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3804
3805 IEM_MC_BEGIN(0, 0);
3806 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3807 IEM_MC_REL_JMP_S32(i32Imm);
3808 } IEM_MC_ELSE() {
3809 IEM_MC_ADVANCE_RIP();
3810 } IEM_MC_ENDIF();
3811 IEM_MC_END();
3812 }
3813 return VINF_SUCCESS;
3814}
3815
3816
3817/** Opcode 0x0f 0x89. */
3818FNIEMOP_DEF(iemOp_jns_Jv)
3819{
3820 IEMOP_MNEMONIC("jns Jv");
3821 IEMOP_HLP_MIN_386();
3822 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3823 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3824 {
3825 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3827
3828 IEM_MC_BEGIN(0, 0);
3829 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3830 IEM_MC_ADVANCE_RIP();
3831 } IEM_MC_ELSE() {
3832 IEM_MC_REL_JMP_S16(i16Imm);
3833 } IEM_MC_ENDIF();
3834 IEM_MC_END();
3835 }
3836 else
3837 {
3838 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3840
3841 IEM_MC_BEGIN(0, 0);
3842 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3843 IEM_MC_ADVANCE_RIP();
3844 } IEM_MC_ELSE() {
3845 IEM_MC_REL_JMP_S32(i32Imm);
3846 } IEM_MC_ENDIF();
3847 IEM_MC_END();
3848 }
3849 return VINF_SUCCESS;
3850}
3851
3852
3853/** Opcode 0x0f 0x8a. */
3854FNIEMOP_DEF(iemOp_jp_Jv)
3855{
3856 IEMOP_MNEMONIC("jp Jv");
3857 IEMOP_HLP_MIN_386();
3858 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3859 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3860 {
3861 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3863
3864 IEM_MC_BEGIN(0, 0);
3865 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3866 IEM_MC_REL_JMP_S16(i16Imm);
3867 } IEM_MC_ELSE() {
3868 IEM_MC_ADVANCE_RIP();
3869 } IEM_MC_ENDIF();
3870 IEM_MC_END();
3871 }
3872 else
3873 {
3874 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3876
3877 IEM_MC_BEGIN(0, 0);
3878 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3879 IEM_MC_REL_JMP_S32(i32Imm);
3880 } IEM_MC_ELSE() {
3881 IEM_MC_ADVANCE_RIP();
3882 } IEM_MC_ENDIF();
3883 IEM_MC_END();
3884 }
3885 return VINF_SUCCESS;
3886}
3887
3888
3889/** Opcode 0x0f 0x8b. */
3890FNIEMOP_DEF(iemOp_jnp_Jv)
3891{
3892 IEMOP_MNEMONIC("jnp Jv");
3893 IEMOP_HLP_MIN_386();
3894 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3895 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3896 {
3897 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3899
3900 IEM_MC_BEGIN(0, 0);
3901 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3902 IEM_MC_ADVANCE_RIP();
3903 } IEM_MC_ELSE() {
3904 IEM_MC_REL_JMP_S16(i16Imm);
3905 } IEM_MC_ENDIF();
3906 IEM_MC_END();
3907 }
3908 else
3909 {
3910 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3912
3913 IEM_MC_BEGIN(0, 0);
3914 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3915 IEM_MC_ADVANCE_RIP();
3916 } IEM_MC_ELSE() {
3917 IEM_MC_REL_JMP_S32(i32Imm);
3918 } IEM_MC_ENDIF();
3919 IEM_MC_END();
3920 }
3921 return VINF_SUCCESS;
3922}
3923
3924
3925/** Opcode 0x0f 0x8c. */
3926FNIEMOP_DEF(iemOp_jl_Jv)
3927{
3928 IEMOP_MNEMONIC("jl/jnge Jv");
3929 IEMOP_HLP_MIN_386();
3930 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3931 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3932 {
3933 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3935
3936 IEM_MC_BEGIN(0, 0);
3937 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3938 IEM_MC_REL_JMP_S16(i16Imm);
3939 } IEM_MC_ELSE() {
3940 IEM_MC_ADVANCE_RIP();
3941 } IEM_MC_ENDIF();
3942 IEM_MC_END();
3943 }
3944 else
3945 {
3946 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3948
3949 IEM_MC_BEGIN(0, 0);
3950 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3951 IEM_MC_REL_JMP_S32(i32Imm);
3952 } IEM_MC_ELSE() {
3953 IEM_MC_ADVANCE_RIP();
3954 } IEM_MC_ENDIF();
3955 IEM_MC_END();
3956 }
3957 return VINF_SUCCESS;
3958}
3959
3960
3961/** Opcode 0x0f 0x8d. */
3962FNIEMOP_DEF(iemOp_jnl_Jv)
3963{
3964 IEMOP_MNEMONIC("jnl/jge Jv");
3965 IEMOP_HLP_MIN_386();
3966 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3967 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3968 {
3969 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3971
3972 IEM_MC_BEGIN(0, 0);
3973 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3974 IEM_MC_ADVANCE_RIP();
3975 } IEM_MC_ELSE() {
3976 IEM_MC_REL_JMP_S16(i16Imm);
3977 } IEM_MC_ENDIF();
3978 IEM_MC_END();
3979 }
3980 else
3981 {
3982 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3984
3985 IEM_MC_BEGIN(0, 0);
3986 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3987 IEM_MC_ADVANCE_RIP();
3988 } IEM_MC_ELSE() {
3989 IEM_MC_REL_JMP_S32(i32Imm);
3990 } IEM_MC_ENDIF();
3991 IEM_MC_END();
3992 }
3993 return VINF_SUCCESS;
3994}
3995
3996
3997/** Opcode 0x0f 0x8e. */
3998FNIEMOP_DEF(iemOp_jle_Jv)
3999{
4000 IEMOP_MNEMONIC("jle/jng Jv");
4001 IEMOP_HLP_MIN_386();
4002 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4003 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4004 {
4005 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4007
4008 IEM_MC_BEGIN(0, 0);
4009 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4010 IEM_MC_REL_JMP_S16(i16Imm);
4011 } IEM_MC_ELSE() {
4012 IEM_MC_ADVANCE_RIP();
4013 } IEM_MC_ENDIF();
4014 IEM_MC_END();
4015 }
4016 else
4017 {
4018 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4020
4021 IEM_MC_BEGIN(0, 0);
4022 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4023 IEM_MC_REL_JMP_S32(i32Imm);
4024 } IEM_MC_ELSE() {
4025 IEM_MC_ADVANCE_RIP();
4026 } IEM_MC_ENDIF();
4027 IEM_MC_END();
4028 }
4029 return VINF_SUCCESS;
4030}
4031
4032
4033/** Opcode 0x0f 0x8f. */
4034FNIEMOP_DEF(iemOp_jnle_Jv)
4035{
4036 IEMOP_MNEMONIC("jnle/jg Jv");
4037 IEMOP_HLP_MIN_386();
4038 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4039 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4040 {
4041 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4043
4044 IEM_MC_BEGIN(0, 0);
4045 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4046 IEM_MC_ADVANCE_RIP();
4047 } IEM_MC_ELSE() {
4048 IEM_MC_REL_JMP_S16(i16Imm);
4049 } IEM_MC_ENDIF();
4050 IEM_MC_END();
4051 }
4052 else
4053 {
4054 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4056
4057 IEM_MC_BEGIN(0, 0);
4058 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4059 IEM_MC_ADVANCE_RIP();
4060 } IEM_MC_ELSE() {
4061 IEM_MC_REL_JMP_S32(i32Imm);
4062 } IEM_MC_ENDIF();
4063 IEM_MC_END();
4064 }
4065 return VINF_SUCCESS;
4066}
4067
4068
4069/** Opcode 0x0f 0x90. */
4070FNIEMOP_DEF(iemOp_seto_Eb)
4071{
4072 IEMOP_MNEMONIC("seto Eb");
4073 IEMOP_HLP_MIN_386();
4074 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4075
4076 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4077 * any way. AMD says it's "unused", whatever that means. We're
4078 * ignoring for now. */
4079 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4080 {
4081 /* register target */
4082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4083 IEM_MC_BEGIN(0, 0);
4084 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4085 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4086 } IEM_MC_ELSE() {
4087 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4088 } IEM_MC_ENDIF();
4089 IEM_MC_ADVANCE_RIP();
4090 IEM_MC_END();
4091 }
4092 else
4093 {
4094 /* memory target */
4095 IEM_MC_BEGIN(0, 1);
4096 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4097 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4099 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4100 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4101 } IEM_MC_ELSE() {
4102 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4103 } IEM_MC_ENDIF();
4104 IEM_MC_ADVANCE_RIP();
4105 IEM_MC_END();
4106 }
4107 return VINF_SUCCESS;
4108}
4109
4110
4111/** Opcode 0x0f 0x91. */
4112FNIEMOP_DEF(iemOp_setno_Eb)
4113{
4114 IEMOP_MNEMONIC("setno Eb");
4115 IEMOP_HLP_MIN_386();
4116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4117
4118 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4119 * any way. AMD says it's "unused", whatever that means. We're
4120 * ignoring for now. */
4121 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4122 {
4123 /* register target */
4124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4125 IEM_MC_BEGIN(0, 0);
4126 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4127 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4128 } IEM_MC_ELSE() {
4129 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4130 } IEM_MC_ENDIF();
4131 IEM_MC_ADVANCE_RIP();
4132 IEM_MC_END();
4133 }
4134 else
4135 {
4136 /* memory target */
4137 IEM_MC_BEGIN(0, 1);
4138 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4139 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4141 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4142 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4143 } IEM_MC_ELSE() {
4144 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4145 } IEM_MC_ENDIF();
4146 IEM_MC_ADVANCE_RIP();
4147 IEM_MC_END();
4148 }
4149 return VINF_SUCCESS;
4150}
4151
4152
4153/** Opcode 0x0f 0x92. */
4154FNIEMOP_DEF(iemOp_setc_Eb)
4155{
4156 IEMOP_MNEMONIC("setc Eb");
4157 IEMOP_HLP_MIN_386();
4158 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4159
4160 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4161 * any way. AMD says it's "unused", whatever that means. We're
4162 * ignoring for now. */
4163 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4164 {
4165 /* register target */
4166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4167 IEM_MC_BEGIN(0, 0);
4168 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4169 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4170 } IEM_MC_ELSE() {
4171 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4172 } IEM_MC_ENDIF();
4173 IEM_MC_ADVANCE_RIP();
4174 IEM_MC_END();
4175 }
4176 else
4177 {
4178 /* memory target */
4179 IEM_MC_BEGIN(0, 1);
4180 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4181 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4183 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4184 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4185 } IEM_MC_ELSE() {
4186 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4187 } IEM_MC_ENDIF();
4188 IEM_MC_ADVANCE_RIP();
4189 IEM_MC_END();
4190 }
4191 return VINF_SUCCESS;
4192}
4193
4194
4195/** Opcode 0x0f 0x93. */
4196FNIEMOP_DEF(iemOp_setnc_Eb)
4197{
4198 IEMOP_MNEMONIC("setnc Eb");
4199 IEMOP_HLP_MIN_386();
4200 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4201
4202 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4203 * any way. AMD says it's "unused", whatever that means. We're
4204 * ignoring for now. */
4205 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4206 {
4207 /* register target */
4208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4209 IEM_MC_BEGIN(0, 0);
4210 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4211 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4212 } IEM_MC_ELSE() {
4213 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4214 } IEM_MC_ENDIF();
4215 IEM_MC_ADVANCE_RIP();
4216 IEM_MC_END();
4217 }
4218 else
4219 {
4220 /* memory target */
4221 IEM_MC_BEGIN(0, 1);
4222 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4223 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4225 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4226 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4227 } IEM_MC_ELSE() {
4228 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4229 } IEM_MC_ENDIF();
4230 IEM_MC_ADVANCE_RIP();
4231 IEM_MC_END();
4232 }
4233 return VINF_SUCCESS;
4234}
4235
4236
4237/** Opcode 0x0f 0x94. */
4238FNIEMOP_DEF(iemOp_sete_Eb)
4239{
4240 IEMOP_MNEMONIC("sete Eb");
4241 IEMOP_HLP_MIN_386();
4242 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4243
4244 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4245 * any way. AMD says it's "unused", whatever that means. We're
4246 * ignoring for now. */
4247 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4248 {
4249 /* register target */
4250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4251 IEM_MC_BEGIN(0, 0);
4252 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4253 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4254 } IEM_MC_ELSE() {
4255 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4256 } IEM_MC_ENDIF();
4257 IEM_MC_ADVANCE_RIP();
4258 IEM_MC_END();
4259 }
4260 else
4261 {
4262 /* memory target */
4263 IEM_MC_BEGIN(0, 1);
4264 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4265 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4266 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4267 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4268 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4269 } IEM_MC_ELSE() {
4270 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4271 } IEM_MC_ENDIF();
4272 IEM_MC_ADVANCE_RIP();
4273 IEM_MC_END();
4274 }
4275 return VINF_SUCCESS;
4276}
4277
4278
4279/** Opcode 0x0f 0x95. */
4280FNIEMOP_DEF(iemOp_setne_Eb)
4281{
4282 IEMOP_MNEMONIC("setne Eb");
4283 IEMOP_HLP_MIN_386();
4284 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4285
4286 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4287 * any way. AMD says it's "unused", whatever that means. We're
4288 * ignoring for now. */
4289 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4290 {
4291 /* register target */
4292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4293 IEM_MC_BEGIN(0, 0);
4294 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4295 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4296 } IEM_MC_ELSE() {
4297 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4298 } IEM_MC_ENDIF();
4299 IEM_MC_ADVANCE_RIP();
4300 IEM_MC_END();
4301 }
4302 else
4303 {
4304 /* memory target */
4305 IEM_MC_BEGIN(0, 1);
4306 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4307 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4309 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4310 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4311 } IEM_MC_ELSE() {
4312 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4313 } IEM_MC_ENDIF();
4314 IEM_MC_ADVANCE_RIP();
4315 IEM_MC_END();
4316 }
4317 return VINF_SUCCESS;
4318}
4319
4320
4321/** Opcode 0x0f 0x96. */
4322FNIEMOP_DEF(iemOp_setbe_Eb)
4323{
4324 IEMOP_MNEMONIC("setbe Eb");
4325 IEMOP_HLP_MIN_386();
4326 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4327
4328 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4329 * any way. AMD says it's "unused", whatever that means. We're
4330 * ignoring for now. */
4331 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4332 {
4333 /* register target */
4334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4335 IEM_MC_BEGIN(0, 0);
4336 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4337 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4338 } IEM_MC_ELSE() {
4339 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4340 } IEM_MC_ENDIF();
4341 IEM_MC_ADVANCE_RIP();
4342 IEM_MC_END();
4343 }
4344 else
4345 {
4346 /* memory target */
4347 IEM_MC_BEGIN(0, 1);
4348 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4351 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4352 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4353 } IEM_MC_ELSE() {
4354 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4355 } IEM_MC_ENDIF();
4356 IEM_MC_ADVANCE_RIP();
4357 IEM_MC_END();
4358 }
4359 return VINF_SUCCESS;
4360}
4361
4362
4363/** Opcode 0x0f 0x97. */
4364FNIEMOP_DEF(iemOp_setnbe_Eb)
4365{
4366 IEMOP_MNEMONIC("setnbe Eb");
4367 IEMOP_HLP_MIN_386();
4368 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4369
4370 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4371 * any way. AMD says it's "unused", whatever that means. We're
4372 * ignoring for now. */
4373 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4374 {
4375 /* register target */
4376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4377 IEM_MC_BEGIN(0, 0);
4378 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4379 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4380 } IEM_MC_ELSE() {
4381 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4382 } IEM_MC_ENDIF();
4383 IEM_MC_ADVANCE_RIP();
4384 IEM_MC_END();
4385 }
4386 else
4387 {
4388 /* memory target */
4389 IEM_MC_BEGIN(0, 1);
4390 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4391 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4392 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4393 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4394 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4395 } IEM_MC_ELSE() {
4396 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4397 } IEM_MC_ENDIF();
4398 IEM_MC_ADVANCE_RIP();
4399 IEM_MC_END();
4400 }
4401 return VINF_SUCCESS;
4402}
4403
4404
4405/** Opcode 0x0f 0x98. */
4406FNIEMOP_DEF(iemOp_sets_Eb)
4407{
4408 IEMOP_MNEMONIC("sets Eb");
4409 IEMOP_HLP_MIN_386();
4410 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4411
4412 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4413 * any way. AMD says it's "unused", whatever that means. We're
4414 * ignoring for now. */
4415 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4416 {
4417 /* register target */
4418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4419 IEM_MC_BEGIN(0, 0);
4420 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4421 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4422 } IEM_MC_ELSE() {
4423 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4424 } IEM_MC_ENDIF();
4425 IEM_MC_ADVANCE_RIP();
4426 IEM_MC_END();
4427 }
4428 else
4429 {
4430 /* memory target */
4431 IEM_MC_BEGIN(0, 1);
4432 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4433 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4435 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4436 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4437 } IEM_MC_ELSE() {
4438 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4439 } IEM_MC_ENDIF();
4440 IEM_MC_ADVANCE_RIP();
4441 IEM_MC_END();
4442 }
4443 return VINF_SUCCESS;
4444}
4445
4446
4447/** Opcode 0x0f 0x99. */
4448FNIEMOP_DEF(iemOp_setns_Eb)
4449{
4450 IEMOP_MNEMONIC("setns Eb");
4451 IEMOP_HLP_MIN_386();
4452 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4453
4454 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4455 * any way. AMD says it's "unused", whatever that means. We're
4456 * ignoring for now. */
4457 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4458 {
4459 /* register target */
4460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4461 IEM_MC_BEGIN(0, 0);
4462 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4463 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4464 } IEM_MC_ELSE() {
4465 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4466 } IEM_MC_ENDIF();
4467 IEM_MC_ADVANCE_RIP();
4468 IEM_MC_END();
4469 }
4470 else
4471 {
4472 /* memory target */
4473 IEM_MC_BEGIN(0, 1);
4474 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4475 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4477 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4478 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4479 } IEM_MC_ELSE() {
4480 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4481 } IEM_MC_ENDIF();
4482 IEM_MC_ADVANCE_RIP();
4483 IEM_MC_END();
4484 }
4485 return VINF_SUCCESS;
4486}
4487
4488
4489/** Opcode 0x0f 0x9a. */
4490FNIEMOP_DEF(iemOp_setp_Eb)
4491{
4492 IEMOP_MNEMONIC("setp Eb");
4493 IEMOP_HLP_MIN_386();
4494 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4495
4496 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4497 * any way. AMD says it's "unused", whatever that means. We're
4498 * ignoring for now. */
4499 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4500 {
4501 /* register target */
4502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4503 IEM_MC_BEGIN(0, 0);
4504 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4505 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4506 } IEM_MC_ELSE() {
4507 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4508 } IEM_MC_ENDIF();
4509 IEM_MC_ADVANCE_RIP();
4510 IEM_MC_END();
4511 }
4512 else
4513 {
4514 /* memory target */
4515 IEM_MC_BEGIN(0, 1);
4516 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4517 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4519 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4520 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4521 } IEM_MC_ELSE() {
4522 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4523 } IEM_MC_ENDIF();
4524 IEM_MC_ADVANCE_RIP();
4525 IEM_MC_END();
4526 }
4527 return VINF_SUCCESS;
4528}
4529
4530
4531/** Opcode 0x0f 0x9b. */
4532FNIEMOP_DEF(iemOp_setnp_Eb)
4533{
4534 IEMOP_MNEMONIC("setnp Eb");
4535 IEMOP_HLP_MIN_386();
4536 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4537
4538 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4539 * any way. AMD says it's "unused", whatever that means. We're
4540 * ignoring for now. */
4541 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4542 {
4543 /* register target */
4544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4545 IEM_MC_BEGIN(0, 0);
4546 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4547 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4548 } IEM_MC_ELSE() {
4549 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4550 } IEM_MC_ENDIF();
4551 IEM_MC_ADVANCE_RIP();
4552 IEM_MC_END();
4553 }
4554 else
4555 {
4556 /* memory target */
4557 IEM_MC_BEGIN(0, 1);
4558 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4559 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4561 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4562 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4563 } IEM_MC_ELSE() {
4564 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4565 } IEM_MC_ENDIF();
4566 IEM_MC_ADVANCE_RIP();
4567 IEM_MC_END();
4568 }
4569 return VINF_SUCCESS;
4570}
4571
4572
4573/** Opcode 0x0f 0x9c. */
4574FNIEMOP_DEF(iemOp_setl_Eb)
4575{
4576 IEMOP_MNEMONIC("setl Eb");
4577 IEMOP_HLP_MIN_386();
4578 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4579
4580 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4581 * any way. AMD says it's "unused", whatever that means. We're
4582 * ignoring for now. */
4583 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4584 {
4585 /* register target */
4586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4587 IEM_MC_BEGIN(0, 0);
4588 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4589 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4590 } IEM_MC_ELSE() {
4591 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4592 } IEM_MC_ENDIF();
4593 IEM_MC_ADVANCE_RIP();
4594 IEM_MC_END();
4595 }
4596 else
4597 {
4598 /* memory target */
4599 IEM_MC_BEGIN(0, 1);
4600 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4601 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4603 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4604 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4605 } IEM_MC_ELSE() {
4606 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4607 } IEM_MC_ENDIF();
4608 IEM_MC_ADVANCE_RIP();
4609 IEM_MC_END();
4610 }
4611 return VINF_SUCCESS;
4612}
4613
4614
4615/** Opcode 0x0f 0x9d. */
4616FNIEMOP_DEF(iemOp_setnl_Eb)
4617{
4618 IEMOP_MNEMONIC("setnl Eb");
4619 IEMOP_HLP_MIN_386();
4620 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4621
4622 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4623 * any way. AMD says it's "unused", whatever that means. We're
4624 * ignoring for now. */
4625 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4626 {
4627 /* register target */
4628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4629 IEM_MC_BEGIN(0, 0);
4630 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4631 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4632 } IEM_MC_ELSE() {
4633 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4634 } IEM_MC_ENDIF();
4635 IEM_MC_ADVANCE_RIP();
4636 IEM_MC_END();
4637 }
4638 else
4639 {
4640 /* memory target */
4641 IEM_MC_BEGIN(0, 1);
4642 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4643 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4645 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4646 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4647 } IEM_MC_ELSE() {
4648 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4649 } IEM_MC_ENDIF();
4650 IEM_MC_ADVANCE_RIP();
4651 IEM_MC_END();
4652 }
4653 return VINF_SUCCESS;
4654}
4655
4656
4657/** Opcode 0x0f 0x9e. */
4658FNIEMOP_DEF(iemOp_setle_Eb)
4659{
4660 IEMOP_MNEMONIC("setle Eb");
4661 IEMOP_HLP_MIN_386();
4662 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4663
4664 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4665 * any way. AMD says it's "unused", whatever that means. We're
4666 * ignoring for now. */
4667 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4668 {
4669 /* register target */
4670 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4671 IEM_MC_BEGIN(0, 0);
4672 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4673 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4674 } IEM_MC_ELSE() {
4675 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4676 } IEM_MC_ENDIF();
4677 IEM_MC_ADVANCE_RIP();
4678 IEM_MC_END();
4679 }
4680 else
4681 {
4682 /* memory target */
4683 IEM_MC_BEGIN(0, 1);
4684 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4685 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4687 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4688 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4689 } IEM_MC_ELSE() {
4690 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4691 } IEM_MC_ENDIF();
4692 IEM_MC_ADVANCE_RIP();
4693 IEM_MC_END();
4694 }
4695 return VINF_SUCCESS;
4696}
4697
4698
4699/** Opcode 0x0f 0x9f. */
4700FNIEMOP_DEF(iemOp_setnle_Eb)
4701{
4702 IEMOP_MNEMONIC("setnle Eb");
4703 IEMOP_HLP_MIN_386();
4704 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4705
4706 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4707 * any way. AMD says it's "unused", whatever that means. We're
4708 * ignoring for now. */
4709 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4710 {
4711 /* register target */
4712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4713 IEM_MC_BEGIN(0, 0);
4714 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4715 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4716 } IEM_MC_ELSE() {
4717 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4718 } IEM_MC_ENDIF();
4719 IEM_MC_ADVANCE_RIP();
4720 IEM_MC_END();
4721 }
4722 else
4723 {
4724 /* memory target */
4725 IEM_MC_BEGIN(0, 1);
4726 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4727 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4729 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4730 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4731 } IEM_MC_ELSE() {
4732 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4733 } IEM_MC_ENDIF();
4734 IEM_MC_ADVANCE_RIP();
4735 IEM_MC_END();
4736 }
4737 return VINF_SUCCESS;
4738}
4739
4740
4741/**
4742 * Common 'push segment-register' helper.
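 * In 64-bit mode only FS and GS can be pushed; the ES, CS, SS and DS forms
 * are invalid opcodes there, which the X86_SREG_FS check below enforces.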
4743 */
4744FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
4745{
4746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4747 if (iReg < X86_SREG_FS)
4748 IEMOP_HLP_NO_64BIT();
4749 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4750
4751 switch (pVCpu->iem.s.enmEffOpSize)
4752 {
4753 case IEMMODE_16BIT:
4754 IEM_MC_BEGIN(0, 1);
4755 IEM_MC_LOCAL(uint16_t, u16Value);
4756 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
4757 IEM_MC_PUSH_U16(u16Value);
4758 IEM_MC_ADVANCE_RIP();
4759 IEM_MC_END();
4760 break;
4761
4762 case IEMMODE_32BIT:
4763 IEM_MC_BEGIN(0, 1);
4764 IEM_MC_LOCAL(uint32_t, u32Value);
4765 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
4766 IEM_MC_PUSH_U32_SREG(u32Value);
4767 IEM_MC_ADVANCE_RIP();
4768 IEM_MC_END();
4769 break;
4770
4771 case IEMMODE_64BIT:
4772 IEM_MC_BEGIN(0, 1);
4773 IEM_MC_LOCAL(uint64_t, u64Value);
4774 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
4775 IEM_MC_PUSH_U64(u64Value);
4776 IEM_MC_ADVANCE_RIP();
4777 IEM_MC_END();
4778 break;
4779 }
4780
4781 return VINF_SUCCESS;
4782}
4783
4784
4785/** Opcode 0x0f 0xa0. */
4786FNIEMOP_DEF(iemOp_push_fs)
4787{
4788 IEMOP_MNEMONIC("push fs");
4789 IEMOP_HLP_MIN_386();
4790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4791 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
4792}
4793
4794
4795/** Opcode 0x0f 0xa1. */
4796FNIEMOP_DEF(iemOp_pop_fs)
4797{
4798 IEMOP_MNEMONIC("pop fs");
4799 IEMOP_HLP_MIN_386();
4800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4801 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
4802}
4803
4804
4805/** Opcode 0x0f 0xa2. */
4806FNIEMOP_DEF(iemOp_cpuid)
4807{
4808 IEMOP_MNEMONIC("cpuid");
4809 IEMOP_HLP_MIN_486(); /* not all 486es. */
4810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4811 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
4812}
4813
4814
4815/**
4816 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4817 * iemOp_bts_Ev_Gv.
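 *
 * For register destinations the bit offset is simply masked to the operand
 * width.  For memory destinations it is a signed offset that may address a
 * unit outside the operand, so the code below splits it into an element
 * displacement and an in-element bit index.  A rough sketch of the 16-bit
 * case, using illustrative names rather than the actual MC macros:
 *
 *      GCPtrEffDst += ((int16_t)u16Src >> 4) * sizeof(uint16_t);
 *      u16Src      &= 15;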
4818 */
4819FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4820{
4821 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4822 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4823
4824 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4825 {
4826 /* register destination. */
4827 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4828 switch (pVCpu->iem.s.enmEffOpSize)
4829 {
4830 case IEMMODE_16BIT:
4831 IEM_MC_BEGIN(3, 0);
4832 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4833 IEM_MC_ARG(uint16_t, u16Src, 1);
4834 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4835
4836 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4837 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4838 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4839 IEM_MC_REF_EFLAGS(pEFlags);
4840 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4841
4842 IEM_MC_ADVANCE_RIP();
4843 IEM_MC_END();
4844 return VINF_SUCCESS;
4845
4846 case IEMMODE_32BIT:
4847 IEM_MC_BEGIN(3, 0);
4848 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4849 IEM_MC_ARG(uint32_t, u32Src, 1);
4850 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4851
4852 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4853 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4854 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4855 IEM_MC_REF_EFLAGS(pEFlags);
4856 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4857
4858 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4859 IEM_MC_ADVANCE_RIP();
4860 IEM_MC_END();
4861 return VINF_SUCCESS;
4862
4863 case IEMMODE_64BIT:
4864 IEM_MC_BEGIN(3, 0);
4865 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4866 IEM_MC_ARG(uint64_t, u64Src, 1);
4867 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4868
4869 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4870 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4871 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4872 IEM_MC_REF_EFLAGS(pEFlags);
4873 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4874
4875 IEM_MC_ADVANCE_RIP();
4876 IEM_MC_END();
4877 return VINF_SUCCESS;
4878
4879 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4880 }
4881 }
4882 else
4883 {
4884 /* memory destination. */
4885
4886 uint32_t fAccess;
4887 if (pImpl->pfnLockedU16)
4888 fAccess = IEM_ACCESS_DATA_RW;
4889 else /* BT */
4890 fAccess = IEM_ACCESS_DATA_R;
4891
4892 NOREF(fAccess);
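 /* Note: fAccess is currently unused; the mappings below always ask for
    read-write access. */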
4893
4894 /** @todo test negative bit offsets! */
4895 switch (pVCpu->iem.s.enmEffOpSize)
4896 {
4897 case IEMMODE_16BIT:
4898 IEM_MC_BEGIN(3, 2);
4899 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4900 IEM_MC_ARG(uint16_t, u16Src, 1);
4901 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4902 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4903 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4904
4905 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4906 if (pImpl->pfnLockedU16)
4907 IEMOP_HLP_DONE_DECODING();
4908 else
4909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4910 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4911 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4912 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4913 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4914 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
4915 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4916 IEM_MC_FETCH_EFLAGS(EFlags);
4917
4918 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4919 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
4920 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4921 else
4922 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4923 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4924
4925 IEM_MC_COMMIT_EFLAGS(EFlags);
4926 IEM_MC_ADVANCE_RIP();
4927 IEM_MC_END();
4928 return VINF_SUCCESS;
4929
4930 case IEMMODE_32BIT:
4931 IEM_MC_BEGIN(3, 2);
4932 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4933 IEM_MC_ARG(uint32_t, u32Src, 1);
4934 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4935 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4936 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4937
4938 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4939 if (pImpl->pfnLockedU16)
4940 IEMOP_HLP_DONE_DECODING();
4941 else
4942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4943 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4944 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4945 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4946 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4947 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4948 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4949 IEM_MC_FETCH_EFLAGS(EFlags);
4950
4951 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4952 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
4953 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4954 else
4955 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4956 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4957
4958 IEM_MC_COMMIT_EFLAGS(EFlags);
4959 IEM_MC_ADVANCE_RIP();
4960 IEM_MC_END();
4961 return VINF_SUCCESS;
4962
4963 case IEMMODE_64BIT:
4964 IEM_MC_BEGIN(3, 2);
4965 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4966 IEM_MC_ARG(uint64_t, u64Src, 1);
4967 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4968 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4969 IEM_MC_LOCAL(int64_t, i64AddrAdj);
4970
4971 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4972 if (pImpl->pfnLockedU16)
4973 IEMOP_HLP_DONE_DECODING();
4974 else
4975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4976 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4977 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
4978 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
4979 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
4980 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
4981 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
4982 IEM_MC_FETCH_EFLAGS(EFlags);
4983
4984 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4985 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
4986 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4987 else
4988 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
4989 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4990
4991 IEM_MC_COMMIT_EFLAGS(EFlags);
4992 IEM_MC_ADVANCE_RIP();
4993 IEM_MC_END();
4994 return VINF_SUCCESS;
4995
4996 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4997 }
4998 }
4999}
5000
5001
5002/** Opcode 0x0f 0xa3. */
5003FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5004{
5005 IEMOP_MNEMONIC("bt Ev,Gv");
5006 IEMOP_HLP_MIN_386();
5007 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5008}
5009
5010
5011/**
5012 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
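 *
 * A rough sketch of what the 16-bit shld worker computes for shift counts
 * 1 through 15 (the real workers are assembly helpers reached via pImpl;
 * the names here are illustrative):
 *
 *      uDst = (uint16_t)((uDst << cShift) | (uSrc >> (16 - cShift)));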
5013 */
5014FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5015{
5016 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5017 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5018
5019 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5020 {
5021 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5023
5024 switch (pVCpu->iem.s.enmEffOpSize)
5025 {
5026 case IEMMODE_16BIT:
5027 IEM_MC_BEGIN(4, 0);
5028 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5029 IEM_MC_ARG(uint16_t, u16Src, 1);
5030 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5031 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5032
5033 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5034 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5035 IEM_MC_REF_EFLAGS(pEFlags);
5036 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5037
5038 IEM_MC_ADVANCE_RIP();
5039 IEM_MC_END();
5040 return VINF_SUCCESS;
5041
5042 case IEMMODE_32BIT:
5043 IEM_MC_BEGIN(4, 0);
5044 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5045 IEM_MC_ARG(uint32_t, u32Src, 1);
5046 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5047 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5048
5049 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5050 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5051 IEM_MC_REF_EFLAGS(pEFlags);
5052 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5053
5054 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5055 IEM_MC_ADVANCE_RIP();
5056 IEM_MC_END();
5057 return VINF_SUCCESS;
5058
5059 case IEMMODE_64BIT:
5060 IEM_MC_BEGIN(4, 0);
5061 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5062 IEM_MC_ARG(uint64_t, u64Src, 1);
5063 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5064 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5065
5066 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5067 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5068 IEM_MC_REF_EFLAGS(pEFlags);
5069 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5070
5071 IEM_MC_ADVANCE_RIP();
5072 IEM_MC_END();
5073 return VINF_SUCCESS;
5074
5075 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5076 }
5077 }
5078 else
5079 {
5080 switch (pVCpu->iem.s.enmEffOpSize)
5081 {
5082 case IEMMODE_16BIT:
5083 IEM_MC_BEGIN(4, 2);
5084 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5085 IEM_MC_ARG(uint16_t, u16Src, 1);
5086 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5087 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5088 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5089
5090 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5091 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5092 IEM_MC_ASSIGN(cShiftArg, cShift);
5093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5094 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5095 IEM_MC_FETCH_EFLAGS(EFlags);
5096 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5097 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5098
5099 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5100 IEM_MC_COMMIT_EFLAGS(EFlags);
5101 IEM_MC_ADVANCE_RIP();
5102 IEM_MC_END();
5103 return VINF_SUCCESS;
5104
5105 case IEMMODE_32BIT:
5106 IEM_MC_BEGIN(4, 2);
5107 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5108 IEM_MC_ARG(uint32_t, u32Src, 1);
5109 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5110 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5111 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5112
5113 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5114 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5115 IEM_MC_ASSIGN(cShiftArg, cShift);
5116 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5117 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5118 IEM_MC_FETCH_EFLAGS(EFlags);
5119 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5120 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5121
5122 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5123 IEM_MC_COMMIT_EFLAGS(EFlags);
5124 IEM_MC_ADVANCE_RIP();
5125 IEM_MC_END();
5126 return VINF_SUCCESS;
5127
5128 case IEMMODE_64BIT:
5129 IEM_MC_BEGIN(4, 2);
5130 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5131 IEM_MC_ARG(uint64_t, u64Src, 1);
5132 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5133 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5135
5136 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5137 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5138 IEM_MC_ASSIGN(cShiftArg, cShift);
5139 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5140 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5141 IEM_MC_FETCH_EFLAGS(EFlags);
5142 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5143 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5144
5145 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5146 IEM_MC_COMMIT_EFLAGS(EFlags);
5147 IEM_MC_ADVANCE_RIP();
5148 IEM_MC_END();
5149 return VINF_SUCCESS;
5150
5151 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5152 }
5153 }
5154}
5155
5156
5157/**
5158 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5159 */
5160FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5161{
5162 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5163 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5164
5165 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5166 {
5167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5168
5169 switch (pVCpu->iem.s.enmEffOpSize)
5170 {
5171 case IEMMODE_16BIT:
5172 IEM_MC_BEGIN(4, 0);
5173 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5174 IEM_MC_ARG(uint16_t, u16Src, 1);
5175 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5176 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5177
5178 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5179 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5180 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5181 IEM_MC_REF_EFLAGS(pEFlags);
5182 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5183
5184 IEM_MC_ADVANCE_RIP();
5185 IEM_MC_END();
5186 return VINF_SUCCESS;
5187
5188 case IEMMODE_32BIT:
5189 IEM_MC_BEGIN(4, 0);
5190 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5191 IEM_MC_ARG(uint32_t, u32Src, 1);
5192 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5193 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5194
5195 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5196 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5197 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5198 IEM_MC_REF_EFLAGS(pEFlags);
5199 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5200
5201 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5202 IEM_MC_ADVANCE_RIP();
5203 IEM_MC_END();
5204 return VINF_SUCCESS;
5205
5206 case IEMMODE_64BIT:
5207 IEM_MC_BEGIN(4, 0);
5208 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5209 IEM_MC_ARG(uint64_t, u64Src, 1);
5210 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5211 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5212
5213 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5214 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5215 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5216 IEM_MC_REF_EFLAGS(pEFlags);
5217 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5218
5219 IEM_MC_ADVANCE_RIP();
5220 IEM_MC_END();
5221 return VINF_SUCCESS;
5222
5223 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5224 }
5225 }
5226 else
5227 {
5228 switch (pVCpu->iem.s.enmEffOpSize)
5229 {
5230 case IEMMODE_16BIT:
5231 IEM_MC_BEGIN(4, 2);
5232 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5233 IEM_MC_ARG(uint16_t, u16Src, 1);
5234 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5235 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5236 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5237
5238 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5240 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5241 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5242 IEM_MC_FETCH_EFLAGS(EFlags);
5243 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5244 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5245
5246 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5247 IEM_MC_COMMIT_EFLAGS(EFlags);
5248 IEM_MC_ADVANCE_RIP();
5249 IEM_MC_END();
5250 return VINF_SUCCESS;
5251
5252 case IEMMODE_32BIT:
5253 IEM_MC_BEGIN(4, 2);
5254 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5255 IEM_MC_ARG(uint32_t, u32Src, 1);
5256 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5257 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5258 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5259
5260 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5262 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5263 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5264 IEM_MC_FETCH_EFLAGS(EFlags);
5265 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5266 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5267
5268 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5269 IEM_MC_COMMIT_EFLAGS(EFlags);
5270 IEM_MC_ADVANCE_RIP();
5271 IEM_MC_END();
5272 return VINF_SUCCESS;
5273
5274 case IEMMODE_64BIT:
5275 IEM_MC_BEGIN(4, 2);
5276 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5277 IEM_MC_ARG(uint64_t, u64Src, 1);
5278 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5279 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5280 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5281
5282 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5284 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5285 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5286 IEM_MC_FETCH_EFLAGS(EFlags);
5287 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5288 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5289
5290 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5291 IEM_MC_COMMIT_EFLAGS(EFlags);
5292 IEM_MC_ADVANCE_RIP();
5293 IEM_MC_END();
5294 return VINF_SUCCESS;
5295
5296 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5297 }
5298 }
5299}
5300
5301
5302
5303/** Opcode 0x0f 0xa4. */
5304FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5305{
5306 IEMOP_MNEMONIC("shld Ev,Gv,Ib");
5307 IEMOP_HLP_MIN_386();
5308 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5309}
5310
5311
5312/** Opcode 0x0f 0xa5. */
5313FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5314{
5315 IEMOP_MNEMONIC("shld Ev,Gv,CL");
5316 IEMOP_HLP_MIN_386();
5317 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5318}
5319
5320
5321/** Opcode 0x0f 0xa8. */
5322FNIEMOP_DEF(iemOp_push_gs)
5323{
5324 IEMOP_MNEMONIC("push gs");
5325 IEMOP_HLP_MIN_386();
5326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5327 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5328}
5329
5330
5331/** Opcode 0x0f 0xa9. */
5332FNIEMOP_DEF(iemOp_pop_gs)
5333{
5334 IEMOP_MNEMONIC("pop gs");
5335 IEMOP_HLP_MIN_386();
5336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5337 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5338}
5339
5340
5341/** Opcode 0x0f 0xaa. */
5342FNIEMOP_STUB(iemOp_rsm);
5343//IEMOP_HLP_MIN_386();
5344
5345
5346/** Opcode 0x0f 0xab. */
5347FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5348{
5349 IEMOP_MNEMONIC("bts Ev,Gv");
5350 IEMOP_HLP_MIN_386();
5351 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5352}
5353
5354
5355/** Opcode 0x0f 0xac. */
5356FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5357{
5358 IEMOP_MNEMONIC("shrd Ev,Gv,Ib");
5359 IEMOP_HLP_MIN_386();
5360 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5361}
5362
5363
5364/** Opcode 0x0f 0xad. */
5365FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5366{
5367 IEMOP_MNEMONIC("shrd Ev,Gv,CL");
5368 IEMOP_HLP_MIN_386();
5369 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5370}
5371
5372
5373/** Opcode 0x0f 0xae mem/0. */
5374FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5375{
5376 IEMOP_MNEMONIC("fxsave m512");
5377 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5378 return IEMOP_RAISE_INVALID_OPCODE();
5379
5380 IEM_MC_BEGIN(3, 1);
5381 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5382 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5383 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5384 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5385 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5386 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5387 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5388 IEM_MC_END();
5389 return VINF_SUCCESS;
5390}
5391
5392
5393/** Opcode 0x0f 0xae mem/1. */
5394FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5395{
5396 IEMOP_MNEMONIC("fxrstor m512");
5397 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5398 return IEMOP_RAISE_INVALID_OPCODE();
5399
5400 IEM_MC_BEGIN(3, 1);
5401 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5402 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5403 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5404 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5406 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5407 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5408 IEM_MC_END();
5409 return VINF_SUCCESS;
5410}
5411
5412
5413/** Opcode 0x0f 0xae mem/2. */
5414FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);
5415
5416/** Opcode 0x0f 0xae mem/3. */
5417FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);
5418
5419/** Opcode 0x0f 0xae mem/4. */
5420FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);
5421
5422/** Opcode 0x0f 0xae mem/5. */
5423FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);
5424
5425/** Opcode 0x0f 0xae mem/6. */
5426FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
5427
5428/** Opcode 0x0f 0xae mem/7. */
5429FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5430
5431
5432/** Opcode 0x0f 0xae 11b/5. */
5433FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
5434{
5435 IEMOP_MNEMONIC("lfence");
5436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5437 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5438 return IEMOP_RAISE_INVALID_OPCODE();
5439
5440 IEM_MC_BEGIN(0, 0);
5441 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5442 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
5443 else
5444 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5445 IEM_MC_ADVANCE_RIP();
5446 IEM_MC_END();
5447 return VINF_SUCCESS;
5448}
5449
5450
5451/** Opcode 0x0f 0xae 11b/6. */
5452FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
5453{
5454 IEMOP_MNEMONIC("mfence");
5455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5456 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5457 return IEMOP_RAISE_INVALID_OPCODE();
5458
5459 IEM_MC_BEGIN(0, 0);
5460 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5461 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
5462 else
5463 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5464 IEM_MC_ADVANCE_RIP();
5465 IEM_MC_END();
5466 return VINF_SUCCESS;
5467}
5468
5469
5470/** Opcode 0x0f 0xae 11b/7. */
5471FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
5472{
5473 IEMOP_MNEMONIC("sfence");
5474 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5475 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5476 return IEMOP_RAISE_INVALID_OPCODE();
5477
5478 IEM_MC_BEGIN(0, 0);
5479 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5480 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
5481 else
5482 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5483 IEM_MC_ADVANCE_RIP();
5484 IEM_MC_END();
5485 return VINF_SUCCESS;
5486}
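
/*
 * Note: The three fence forms above require the guest CPU profile to report
 * SSE2.  When the host itself lacks SSE2, iemAImpl_alt_mem_fence is used
 * instead, which achieves the ordering by other means (presumably a LOCKed
 * memory operation, which is at least as strong).
 */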
5487
5488
5489/** Opcode 0xf3 0x0f 0xae 11b/0. */
5490FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
5491
5492/** Opcode 0xf3 0x0f 0xae 11b/1. */
5493FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
5494
5495/** Opcode 0xf3 0x0f 0xae 11b/2. */
5496FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
5497
5498/** Opcode 0xf3 0x0f 0xae 11b/3. */
5499FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5500
5501
5502/** Opcode 0x0f 0xae. */
5503FNIEMOP_DEF(iemOp_Grp15)
5504{
5505 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
5506 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5507 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
5508 {
5509 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5510 {
5511 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
5512 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
5513 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
5514 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
5515 case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
5516 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
5517 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
5518 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
5519 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5520 }
5521 }
5522 else
5523 {
5524 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
5525 {
5526 case 0:
5527 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5528 {
5529 case 0: return IEMOP_RAISE_INVALID_OPCODE();
5530 case 1: return IEMOP_RAISE_INVALID_OPCODE();
5531 case 2: return IEMOP_RAISE_INVALID_OPCODE();
5532 case 3: return IEMOP_RAISE_INVALID_OPCODE();
5533 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5534 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
5535 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
5536 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
5537 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5538 }
5539 break;
5540
5541 case IEM_OP_PRF_REPZ:
5542 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5543 {
5544 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
5545 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
5546 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
5547 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
5548 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5549 case 5: return IEMOP_RAISE_INVALID_OPCODE();
5550 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5551 case 7: return IEMOP_RAISE_INVALID_OPCODE();
5552 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5553 }
5554 break;
5555
5556 default:
5557 return IEMOP_RAISE_INVALID_OPCODE();
5558 }
5559 }
5560}
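
/*
 * Note: Group 15 decodes along two axes: the memory forms are selected by the
 * reg field alone, while the register forms additionally depend on the
 * repeat/size prefixes -- no prefix yields the fences, F3 yields the
 * RDFSBASE/RDGSBASE/WRFSBASE/WRGSBASE family, and anything else is #UD.
 */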
5561
5562
5563/** Opcode 0x0f 0xaf. */
5564FNIEMOP_DEF(iemOp_imul_Gv_Ev)
5565{
5566 IEMOP_MNEMONIC("imul Gv,Ev");
5567 IEMOP_HLP_MIN_386();
5568 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5569 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
5570}
5571
5572
5573/** Opcode 0x0f 0xb0. */
5574FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
5575{
5576 IEMOP_MNEMONIC("cmpxchg Eb,Gb");
5577 IEMOP_HLP_MIN_486();
5578 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5579
5580 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5581 {
5582 IEMOP_HLP_DONE_DECODING();
5583 IEM_MC_BEGIN(4, 0);
5584 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5585 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5586 IEM_MC_ARG(uint8_t, u8Src, 2);
5587 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5588
5589 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5590 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5591 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
5592 IEM_MC_REF_EFLAGS(pEFlags);
5593 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5594 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5595 else
5596 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5597
5598 IEM_MC_ADVANCE_RIP();
5599 IEM_MC_END();
5600 }
5601 else
5602 {
5603 IEM_MC_BEGIN(4, 3);
5604 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5605 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5606 IEM_MC_ARG(uint8_t, u8Src, 2);
5607 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5608 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5609 IEM_MC_LOCAL(uint8_t, u8Al);
5610
5611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5612 IEMOP_HLP_DONE_DECODING();
5613 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5614 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5615 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
5616 IEM_MC_FETCH_EFLAGS(EFlags);
5617 IEM_MC_REF_LOCAL(pu8Al, u8Al);
5618 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5619 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5620 else
5621 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5622
5623 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
5624 IEM_MC_COMMIT_EFLAGS(EFlags);
5625 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
5626 IEM_MC_ADVANCE_RIP();
5627 IEM_MC_END();
5628 }
5629 return VINF_SUCCESS;
5630}
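
/*
 * Note: CMPXCHG compares AL with the destination; on a match ZF is set and
 * the source is stored, otherwise the destination value is loaded into AL.
 * The memory path maps the operand read/write and commits unconditionally,
 * which matches real hardware: the instruction performs a write cycle even
 * when the comparison fails.
 */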
5631
5632/** Opcode 0x0f 0xb1. */
5633FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
5634{
5635 IEMOP_MNEMONIC("cmpxchg Ev,Gv");
5636 IEMOP_HLP_MIN_486();
5637 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5638
5639 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5640 {
5641 IEMOP_HLP_DONE_DECODING();
5642 switch (pVCpu->iem.s.enmEffOpSize)
5643 {
5644 case IEMMODE_16BIT:
5645 IEM_MC_BEGIN(4, 0);
5646 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5647 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5648 IEM_MC_ARG(uint16_t, u16Src, 2);
5649 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5650
5651 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5652 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5653 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
5654 IEM_MC_REF_EFLAGS(pEFlags);
5655 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5656 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5657 else
5658 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5659
5660 IEM_MC_ADVANCE_RIP();
5661 IEM_MC_END();
5662 return VINF_SUCCESS;
5663
5664 case IEMMODE_32BIT:
5665 IEM_MC_BEGIN(4, 0);
5666 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5667 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5668 IEM_MC_ARG(uint32_t, u32Src, 2);
5669 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5670
5671 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5672 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5673 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
5674 IEM_MC_REF_EFLAGS(pEFlags);
5675 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5676 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5677 else
5678 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5679
5680 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
5681 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5682 IEM_MC_ADVANCE_RIP();
5683 IEM_MC_END();
5684 return VINF_SUCCESS;
5685
5686 case IEMMODE_64BIT:
5687 IEM_MC_BEGIN(4, 0);
5688 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5689 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5690#ifdef RT_ARCH_X86
5691 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5692#else
5693 IEM_MC_ARG(uint64_t, u64Src, 2);
5694#endif
5695 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5696
5697 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5698 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
5699 IEM_MC_REF_EFLAGS(pEFlags);
5700#ifdef RT_ARCH_X86
5701 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5702 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5703 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5704 else
5705 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5706#else
5707 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5708 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5709 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5710 else
5711 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5712#endif
5713
5714 IEM_MC_ADVANCE_RIP();
5715 IEM_MC_END();
5716 return VINF_SUCCESS;
5717
5718 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5719 }
5720 }
5721 else
5722 {
5723 switch (pVCpu->iem.s.enmEffOpSize)
5724 {
5725 case IEMMODE_16BIT:
5726 IEM_MC_BEGIN(4, 3);
5727 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5728 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5729 IEM_MC_ARG(uint16_t, u16Src, 2);
5730 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5731 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5732 IEM_MC_LOCAL(uint16_t, u16Ax);
5733
5734 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5735 IEMOP_HLP_DONE_DECODING();
5736 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5737 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5738 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
5739 IEM_MC_FETCH_EFLAGS(EFlags);
5740 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
5741 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5742 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5743 else
5744 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5745
5746 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5747 IEM_MC_COMMIT_EFLAGS(EFlags);
5748 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
5749 IEM_MC_ADVANCE_RIP();
5750 IEM_MC_END();
5751 return VINF_SUCCESS;
5752
5753 case IEMMODE_32BIT:
5754 IEM_MC_BEGIN(4, 3);
5755 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5756 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5757 IEM_MC_ARG(uint32_t, u32Src, 2);
5758 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5759 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5760 IEM_MC_LOCAL(uint32_t, u32Eax);
5761
5762 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5763 IEMOP_HLP_DONE_DECODING();
5764 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5765 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5766 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
5767 IEM_MC_FETCH_EFLAGS(EFlags);
5768 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
5769 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5770 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5771 else
5772 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5773
5774 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5775 IEM_MC_COMMIT_EFLAGS(EFlags);
5776 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
5777 IEM_MC_ADVANCE_RIP();
5778 IEM_MC_END();
5779 return VINF_SUCCESS;
5780
5781 case IEMMODE_64BIT:
5782 IEM_MC_BEGIN(4, 3);
5783 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5784 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5785#ifdef RT_ARCH_X86
5786 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5787#else
5788 IEM_MC_ARG(uint64_t, u64Src, 2);
5789#endif
5790 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5791 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5792 IEM_MC_LOCAL(uint64_t, u64Rax);
5793
5794 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5795 IEMOP_HLP_DONE_DECODING();
5796 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5797 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
5798 IEM_MC_FETCH_EFLAGS(EFlags);
5799 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
5800#ifdef RT_ARCH_X86
5801 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5802 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5803 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5804 else
5805 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5806#else
5807 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5808 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5809 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5810 else
5811 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5812#endif
5813
5814 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5815 IEM_MC_COMMIT_EFLAGS(EFlags);
5816 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
5817 IEM_MC_ADVANCE_RIP();
5818 IEM_MC_END();
5819 return VINF_SUCCESS;
5820
5821 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5822 }
5823 }
5824}
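
/*
 * Note: In the 64-bit cases above, 32-bit hosts (RT_ARCH_X86) hand the source
 * operand to the assembly worker by reference instead of by value --
 * presumably because a 64-bit by-value argument would be split across two
 * 32-bit stack slots, which the worker would rather not deal with.
 */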
5825
5826
5827FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
5828{
5829 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
5830 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
5831
5832 switch (pVCpu->iem.s.enmEffOpSize)
5833 {
5834 case IEMMODE_16BIT:
5835 IEM_MC_BEGIN(5, 1);
5836 IEM_MC_ARG(uint16_t, uSel, 0);
5837 IEM_MC_ARG(uint16_t, offSeg, 1);
5838 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5839 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5840 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5841 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5842 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5844 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5845 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
5846 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5847 IEM_MC_END();
5848 return VINF_SUCCESS;
5849
5850 case IEMMODE_32BIT:
5851 IEM_MC_BEGIN(5, 1);
5852 IEM_MC_ARG(uint16_t, uSel, 0);
5853 IEM_MC_ARG(uint32_t, offSeg, 1);
5854 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5855 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5856 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5857 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5858 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5859 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5860 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5861 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
5862 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5863 IEM_MC_END();
5864 return VINF_SUCCESS;
5865
5866 case IEMMODE_64BIT:
5867 IEM_MC_BEGIN(5, 1);
5868 IEM_MC_ARG(uint16_t, uSel, 0);
5869 IEM_MC_ARG(uint64_t, offSeg, 1);
5870 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5871 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5872 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5873 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5874 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 5876 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manuals claims it only loads a 32-bit greg. */
5877 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5878 else
5879 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5880 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
5881 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5882 IEM_MC_END();
5883 return VINF_SUCCESS;
5884
5885 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5886 }
5887}
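
/*
 * Note: The far pointer read above follows the m16:16/m16:32/m16:64 layout --
 * the offset comes first, with the 16-bit selector at displacement 2, 4 or 8
 * respectively.  Both parts are fetched before iemCImpl_load_SReg_Greg runs,
 * so a fault on either access is raised before any register state changes.
 */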
5888
5889
5890/** Opcode 0x0f 0xb2. */
5891FNIEMOP_DEF(iemOp_lss_Gv_Mp)
5892{
5893 IEMOP_MNEMONIC("lss Gv,Mp");
5894 IEMOP_HLP_MIN_386();
5895 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5896 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5897 return IEMOP_RAISE_INVALID_OPCODE();
5898 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
5899}
5900
5901
5902/** Opcode 0x0f 0xb3. */
5903FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5904{
5905 IEMOP_MNEMONIC("btr Ev,Gv");
5906 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5907}
5908
5909
5910/** Opcode 0x0f 0xb4. */
5911FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
5912{
5913 IEMOP_MNEMONIC("lfs Gv,Mp");
5914 IEMOP_HLP_MIN_386();
5915 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5916 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5917 return IEMOP_RAISE_INVALID_OPCODE();
5918 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
5919}
5920
5921
5922/** Opcode 0x0f 0xb5. */
5923FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
5924{
5925 IEMOP_MNEMONIC("lgs Gv,Mp");
5926 IEMOP_HLP_MIN_386();
5927 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5928 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5929 return IEMOP_RAISE_INVALID_OPCODE();
5930 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
5931}
5932
5933
5934/** Opcode 0x0f 0xb6. */
5935FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
5936{
5937 IEMOP_MNEMONIC("movzx Gv,Eb");
5938 IEMOP_HLP_MIN_386();
5939
5940 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5941
5942 /*
5943 * If rm is denoting a register, no more instruction bytes.
5944 */
5945 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5946 {
5947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5948 switch (pVCpu->iem.s.enmEffOpSize)
5949 {
5950 case IEMMODE_16BIT:
5951 IEM_MC_BEGIN(0, 1);
5952 IEM_MC_LOCAL(uint16_t, u16Value);
5953 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5954 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
5955 IEM_MC_ADVANCE_RIP();
5956 IEM_MC_END();
5957 return VINF_SUCCESS;
5958
5959 case IEMMODE_32BIT:
5960 IEM_MC_BEGIN(0, 1);
5961 IEM_MC_LOCAL(uint32_t, u32Value);
5962 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5963 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
5964 IEM_MC_ADVANCE_RIP();
5965 IEM_MC_END();
5966 return VINF_SUCCESS;
5967
5968 case IEMMODE_64BIT:
5969 IEM_MC_BEGIN(0, 1);
5970 IEM_MC_LOCAL(uint64_t, u64Value);
5971 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5972 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
5973 IEM_MC_ADVANCE_RIP();
5974 IEM_MC_END();
5975 return VINF_SUCCESS;
5976
5977 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5978 }
5979 }
5980 else
5981 {
5982 /*
5983 * We're loading a register from memory.
5984 */
5985 switch (pVCpu->iem.s.enmEffOpSize)
5986 {
5987 case IEMMODE_16BIT:
5988 IEM_MC_BEGIN(0, 2);
5989 IEM_MC_LOCAL(uint16_t, u16Value);
5990 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5991 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5993 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5994 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
5995 IEM_MC_ADVANCE_RIP();
5996 IEM_MC_END();
5997 return VINF_SUCCESS;
5998
5999 case IEMMODE_32BIT:
6000 IEM_MC_BEGIN(0, 2);
6001 IEM_MC_LOCAL(uint32_t, u32Value);
6002 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6003 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6005 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6006 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6007 IEM_MC_ADVANCE_RIP();
6008 IEM_MC_END();
6009 return VINF_SUCCESS;
6010
6011 case IEMMODE_64BIT:
6012 IEM_MC_BEGIN(0, 2);
6013 IEM_MC_LOCAL(uint64_t, u64Value);
6014 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6015 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6017 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6018 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6019 IEM_MC_ADVANCE_RIP();
6020 IEM_MC_END();
6021 return VINF_SUCCESS;
6022
6023 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6024 }
6025 }
6026}
6027
6028
6029/** Opcode 0x0f 0xb7. */
6030FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6031{
6032 IEMOP_MNEMONIC("movzx Gv,Ew");
6033 IEMOP_HLP_MIN_386();
6034
6035 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6036
 6037 /** @todo Not entirely sure how the operand size prefix is handled here,
 6038 * assuming that it will be ignored. Would be nice to have a few
 6039 * tests for this. */
6040 /*
6041 * If rm is denoting a register, no more instruction bytes.
6042 */
6043 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6044 {
6045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6046 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6047 {
6048 IEM_MC_BEGIN(0, 1);
6049 IEM_MC_LOCAL(uint32_t, u32Value);
6050 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6051 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6052 IEM_MC_ADVANCE_RIP();
6053 IEM_MC_END();
6054 }
6055 else
6056 {
6057 IEM_MC_BEGIN(0, 1);
6058 IEM_MC_LOCAL(uint64_t, u64Value);
6059 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6060 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6061 IEM_MC_ADVANCE_RIP();
6062 IEM_MC_END();
6063 }
6064 }
6065 else
6066 {
6067 /*
6068 * We're loading a register from memory.
6069 */
6070 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6071 {
6072 IEM_MC_BEGIN(0, 2);
6073 IEM_MC_LOCAL(uint32_t, u32Value);
6074 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6075 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6077 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6078 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6079 IEM_MC_ADVANCE_RIP();
6080 IEM_MC_END();
6081 }
6082 else
6083 {
6084 IEM_MC_BEGIN(0, 2);
6085 IEM_MC_LOCAL(uint64_t, u64Value);
6086 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6087 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6089 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6090 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6091 IEM_MC_ADVANCE_RIP();
6092 IEM_MC_END();
6093 }
6094 }
6095 return VINF_SUCCESS;
6096}
6097
6098
6099/** Opcode 0x0f 0xb8. */
6100FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
6101
6102
6103/** Opcode 0x0f 0xb9. */
6104FNIEMOP_DEF(iemOp_Grp10)
6105{
6106 Log(("iemOp_Grp10 -> #UD\n"));
6107 return IEMOP_RAISE_INVALID_OPCODE();
6108}
6109
6110
6111/** Opcode 0x0f 0xba. */
6112FNIEMOP_DEF(iemOp_Grp8)
6113{
6114 IEMOP_HLP_MIN_386();
6115 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6116 PCIEMOPBINSIZES pImpl;
6117 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6118 {
6119 case 0: case 1: case 2: case 3:
6120 return IEMOP_RAISE_INVALID_OPCODE();
6121 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC("bt Ev,Ib"); break;
6122 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC("bts Ev,Ib"); break;
6123 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC("btr Ev,Ib"); break;
6124 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC("btc Ev,Ib"); break;
6125 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6126 }
6127 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6128
6129 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6130 {
6131 /* register destination. */
6132 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6133 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6134
6135 switch (pVCpu->iem.s.enmEffOpSize)
6136 {
6137 case IEMMODE_16BIT:
6138 IEM_MC_BEGIN(3, 0);
6139 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6140 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6141 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6142
6143 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6144 IEM_MC_REF_EFLAGS(pEFlags);
6145 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6146
6147 IEM_MC_ADVANCE_RIP();
6148 IEM_MC_END();
6149 return VINF_SUCCESS;
6150
6151 case IEMMODE_32BIT:
6152 IEM_MC_BEGIN(3, 0);
6153 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6154 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6155 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6156
6157 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6158 IEM_MC_REF_EFLAGS(pEFlags);
6159 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6160
6161 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6162 IEM_MC_ADVANCE_RIP();
6163 IEM_MC_END();
6164 return VINF_SUCCESS;
6165
6166 case IEMMODE_64BIT:
6167 IEM_MC_BEGIN(3, 0);
6168 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6169 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6170 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6171
6172 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6173 IEM_MC_REF_EFLAGS(pEFlags);
6174 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6175
6176 IEM_MC_ADVANCE_RIP();
6177 IEM_MC_END();
6178 return VINF_SUCCESS;
6179
6180 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6181 }
6182 }
6183 else
6184 {
6185 /* memory destination. */
6186
6187 uint32_t fAccess;
6188 if (pImpl->pfnLockedU16)
6189 fAccess = IEM_ACCESS_DATA_RW;
6190 else /* BT */
6191 fAccess = IEM_ACCESS_DATA_R;
6192
6193 /** @todo test negative bit offsets! */
6194 switch (pVCpu->iem.s.enmEffOpSize)
6195 {
6196 case IEMMODE_16BIT:
6197 IEM_MC_BEGIN(3, 1);
6198 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6199 IEM_MC_ARG(uint16_t, u16Src, 1);
6200 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6201 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6202
6203 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6204 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6205 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6206 if (pImpl->pfnLockedU16)
6207 IEMOP_HLP_DONE_DECODING();
6208 else
6209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6210 IEM_MC_FETCH_EFLAGS(EFlags);
6211 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6212 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6213 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6214 else
6215 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6216 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6217
6218 IEM_MC_COMMIT_EFLAGS(EFlags);
6219 IEM_MC_ADVANCE_RIP();
6220 IEM_MC_END();
6221 return VINF_SUCCESS;
6222
6223 case IEMMODE_32BIT:
6224 IEM_MC_BEGIN(3, 1);
6225 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6226 IEM_MC_ARG(uint32_t, u32Src, 1);
6227 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6228 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6229
6230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6231 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6232 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6233 if (pImpl->pfnLockedU16)
6234 IEMOP_HLP_DONE_DECODING();
6235 else
6236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6237 IEM_MC_FETCH_EFLAGS(EFlags);
6238 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6239 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6240 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6241 else
6242 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6243 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6244
6245 IEM_MC_COMMIT_EFLAGS(EFlags);
6246 IEM_MC_ADVANCE_RIP();
6247 IEM_MC_END();
6248 return VINF_SUCCESS;
6249
6250 case IEMMODE_64BIT:
6251 IEM_MC_BEGIN(3, 1);
6252 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6253 IEM_MC_ARG(uint64_t, u64Src, 1);
6254 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6255 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6256
6257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6258 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6259 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6260 if (pImpl->pfnLockedU16)
6261 IEMOP_HLP_DONE_DECODING();
6262 else
6263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6264 IEM_MC_FETCH_EFLAGS(EFlags);
6265 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6266 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6267 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6268 else
6269 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6270 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6271
6272 IEM_MC_COMMIT_EFLAGS(EFlags);
6273 IEM_MC_ADVANCE_RIP();
6274 IEM_MC_END();
6275 return VINF_SUCCESS;
6276
6277 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6278 }
6279 }
6280
6281}
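
/*
 * Note: For these immediate forms the bit offset is masked to the operand
 * width (0x0f/0x1f/0x3f), so unlike the Gv-register forms no out-of-range or
 * negative offset can reach memory.  The '1' passed to
 * IEM_MC_CALC_RM_EFF_ADDR announces that one immediate byte still follows the
 * ModRM/displacement bytes, keeping 64-bit RIP-relative addressing correct.
 */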
6282
6283
6284/** Opcode 0x0f 0xbb. */
6285FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6286{
6287 IEMOP_MNEMONIC("btc Ev,Gv");
6288 IEMOP_HLP_MIN_386();
6289 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6290}
6291
6292
6293/** Opcode 0x0f 0xbc. */
6294FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6295{
6296 IEMOP_MNEMONIC("bsf Gv,Ev");
6297 IEMOP_HLP_MIN_386();
6298 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6299 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6300}
6301
6302
6303/** Opcode 0x0f 0xbd. */
6304FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6305{
6306 IEMOP_MNEMONIC("bsr Gv,Ev");
6307 IEMOP_HLP_MIN_386();
6308 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6309 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6310}
6311
6312
6313/** Opcode 0x0f 0xbe. */
6314FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
6315{
6316 IEMOP_MNEMONIC("movsx Gv,Eb");
6317 IEMOP_HLP_MIN_386();
6318
6319 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6320
6321 /*
6322 * If rm is denoting a register, no more instruction bytes.
6323 */
6324 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6325 {
6326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6327 switch (pVCpu->iem.s.enmEffOpSize)
6328 {
6329 case IEMMODE_16BIT:
6330 IEM_MC_BEGIN(0, 1);
6331 IEM_MC_LOCAL(uint16_t, u16Value);
6332 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6333 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6334 IEM_MC_ADVANCE_RIP();
6335 IEM_MC_END();
6336 return VINF_SUCCESS;
6337
6338 case IEMMODE_32BIT:
6339 IEM_MC_BEGIN(0, 1);
6340 IEM_MC_LOCAL(uint32_t, u32Value);
6341 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6342 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6343 IEM_MC_ADVANCE_RIP();
6344 IEM_MC_END();
6345 return VINF_SUCCESS;
6346
6347 case IEMMODE_64BIT:
6348 IEM_MC_BEGIN(0, 1);
6349 IEM_MC_LOCAL(uint64_t, u64Value);
6350 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6351 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6352 IEM_MC_ADVANCE_RIP();
6353 IEM_MC_END();
6354 return VINF_SUCCESS;
6355
6356 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6357 }
6358 }
6359 else
6360 {
6361 /*
6362 * We're loading a register from memory.
6363 */
6364 switch (pVCpu->iem.s.enmEffOpSize)
6365 {
6366 case IEMMODE_16BIT:
6367 IEM_MC_BEGIN(0, 2);
6368 IEM_MC_LOCAL(uint16_t, u16Value);
6369 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6370 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6372 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6373 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6374 IEM_MC_ADVANCE_RIP();
6375 IEM_MC_END();
6376 return VINF_SUCCESS;
6377
6378 case IEMMODE_32BIT:
6379 IEM_MC_BEGIN(0, 2);
6380 IEM_MC_LOCAL(uint32_t, u32Value);
6381 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6382 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6384 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6385 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6386 IEM_MC_ADVANCE_RIP();
6387 IEM_MC_END();
6388 return VINF_SUCCESS;
6389
6390 case IEMMODE_64BIT:
6391 IEM_MC_BEGIN(0, 2);
6392 IEM_MC_LOCAL(uint64_t, u64Value);
6393 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6394 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6396 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6397 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6398 IEM_MC_ADVANCE_RIP();
6399 IEM_MC_END();
6400 return VINF_SUCCESS;
6401
6402 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6403 }
6404 }
6405}
6406
6407
6408/** Opcode 0x0f 0xbf. */
6409FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
6410{
6411 IEMOP_MNEMONIC("movsx Gv,Ew");
6412 IEMOP_HLP_MIN_386();
6413
6414 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6415
 6416 /** @todo Not entirely sure how the operand size prefix is handled here,
 6417 * assuming that it will be ignored. Would be nice to have a few
 6418 * tests for this. */
6419 /*
6420 * If rm is denoting a register, no more instruction bytes.
6421 */
6422 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6423 {
6424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6425 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6426 {
6427 IEM_MC_BEGIN(0, 1);
6428 IEM_MC_LOCAL(uint32_t, u32Value);
6429 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6430 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6431 IEM_MC_ADVANCE_RIP();
6432 IEM_MC_END();
6433 }
6434 else
6435 {
6436 IEM_MC_BEGIN(0, 1);
6437 IEM_MC_LOCAL(uint64_t, u64Value);
6438 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6439 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6440 IEM_MC_ADVANCE_RIP();
6441 IEM_MC_END();
6442 }
6443 }
6444 else
6445 {
6446 /*
6447 * We're loading a register from memory.
6448 */
6449 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6450 {
6451 IEM_MC_BEGIN(0, 2);
6452 IEM_MC_LOCAL(uint32_t, u32Value);
6453 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6454 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6456 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6457 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6458 IEM_MC_ADVANCE_RIP();
6459 IEM_MC_END();
6460 }
6461 else
6462 {
6463 IEM_MC_BEGIN(0, 2);
6464 IEM_MC_LOCAL(uint64_t, u64Value);
6465 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6466 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6468 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6469 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6470 IEM_MC_ADVANCE_RIP();
6471 IEM_MC_END();
6472 }
6473 }
6474 return VINF_SUCCESS;
6475}
6476
6477
6478/** Opcode 0x0f 0xc0. */
6479FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
6480{
6481 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6482 IEMOP_HLP_MIN_486();
6483 IEMOP_MNEMONIC("xadd Eb,Gb");
6484
6485 /*
6486 * If rm is denoting a register, no more instruction bytes.
6487 */
6488 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6489 {
6490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6491
6492 IEM_MC_BEGIN(3, 0);
6493 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6494 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6495 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6496
6497 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6498 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6499 IEM_MC_REF_EFLAGS(pEFlags);
6500 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6501
6502 IEM_MC_ADVANCE_RIP();
6503 IEM_MC_END();
6504 }
6505 else
6506 {
6507 /*
6508 * We're accessing memory.
6509 */
6510 IEM_MC_BEGIN(3, 3);
6511 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6512 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6513 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6514 IEM_MC_LOCAL(uint8_t, u8RegCopy);
6515 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6516
6517 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6518 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6519 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6520 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
6521 IEM_MC_FETCH_EFLAGS(EFlags);
6522 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6523 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6524 else
6525 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
6526
6527 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6528 IEM_MC_COMMIT_EFLAGS(EFlags);
6529 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
6530 IEM_MC_ADVANCE_RIP();
6531 IEM_MC_END();
6532 return VINF_SUCCESS;
6533 }
6534 return VINF_SUCCESS;
6535}
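
/*
 * Note: XADD exchanges the two operands and leaves their sum in the
 * destination.  The memory path works on a local copy of the register operand
 * so that the original destination value can be written back to the register
 * only after the memory operand has been committed successfully.
 */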
6536
6537
6538/** Opcode 0x0f 0xc1. */
6539FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
6540{
6541 IEMOP_MNEMONIC("xadd Ev,Gv");
6542 IEMOP_HLP_MIN_486();
6543 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6544
6545 /*
6546 * If rm is denoting a register, no more instruction bytes.
6547 */
6548 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6549 {
6550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6551
6552 switch (pVCpu->iem.s.enmEffOpSize)
6553 {
6554 case IEMMODE_16BIT:
6555 IEM_MC_BEGIN(3, 0);
6556 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6557 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6558 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6559
6560 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6561 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6562 IEM_MC_REF_EFLAGS(pEFlags);
6563 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6564
6565 IEM_MC_ADVANCE_RIP();
6566 IEM_MC_END();
6567 return VINF_SUCCESS;
6568
6569 case IEMMODE_32BIT:
6570 IEM_MC_BEGIN(3, 0);
6571 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6572 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6573 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6574
6575 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6576 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6577 IEM_MC_REF_EFLAGS(pEFlags);
6578 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6579
6580 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6581 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
6582 IEM_MC_ADVANCE_RIP();
6583 IEM_MC_END();
6584 return VINF_SUCCESS;
6585
6586 case IEMMODE_64BIT:
6587 IEM_MC_BEGIN(3, 0);
6588 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6589 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6590 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6591
6592 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6593 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6594 IEM_MC_REF_EFLAGS(pEFlags);
6595 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6596
6597 IEM_MC_ADVANCE_RIP();
6598 IEM_MC_END();
6599 return VINF_SUCCESS;
6600
6601 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6602 }
6603 }
6604 else
6605 {
6606 /*
6607 * We're accessing memory.
6608 */
6609 switch (pVCpu->iem.s.enmEffOpSize)
6610 {
6611 case IEMMODE_16BIT:
6612 IEM_MC_BEGIN(3, 3);
6613 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6614 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6615 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6616 IEM_MC_LOCAL(uint16_t, u16RegCopy);
6617 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6618
6619 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6620 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6621 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6622 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
6623 IEM_MC_FETCH_EFLAGS(EFlags);
6624 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6625 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6626 else
6627 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
6628
6629 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6630 IEM_MC_COMMIT_EFLAGS(EFlags);
6631 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
6632 IEM_MC_ADVANCE_RIP();
6633 IEM_MC_END();
6634 return VINF_SUCCESS;
6635
6636 case IEMMODE_32BIT:
6637 IEM_MC_BEGIN(3, 3);
6638 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6639 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6640 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6641 IEM_MC_LOCAL(uint32_t, u32RegCopy);
6642 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6643
6644 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6645 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6646 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6647 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
6648 IEM_MC_FETCH_EFLAGS(EFlags);
6649 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6650 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6651 else
6652 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
6653
6654 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6655 IEM_MC_COMMIT_EFLAGS(EFlags);
6656 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
6657 IEM_MC_ADVANCE_RIP();
6658 IEM_MC_END();
6659 return VINF_SUCCESS;
6660
6661 case IEMMODE_64BIT:
6662 IEM_MC_BEGIN(3, 3);
6663 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6664 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6665 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6666 IEM_MC_LOCAL(uint64_t, u64RegCopy);
6667 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6668
6669 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6670 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6671 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6672 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
6673 IEM_MC_FETCH_EFLAGS(EFlags);
6674 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6675 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6676 else
6677 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
6678
6679 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6680 IEM_MC_COMMIT_EFLAGS(EFlags);
6681 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
6682 IEM_MC_ADVANCE_RIP();
6683 IEM_MC_END();
6684 return VINF_SUCCESS;
6685
6686 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6687 }
6688 }
6689}
6690
6691/** Opcode 0x0f 0xc2. */
6692FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);
6693
6694
6695/** Opcode 0x0f 0xc3. */
6696FNIEMOP_DEF(iemOp_movnti_My_Gy)
6697{
6698 IEMOP_MNEMONIC("movnti My,Gy");
6699
6700 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6701
6702 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
6703 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
6704 {
6705 switch (pVCpu->iem.s.enmEffOpSize)
6706 {
6707 case IEMMODE_32BIT:
6708 IEM_MC_BEGIN(0, 2);
6709 IEM_MC_LOCAL(uint32_t, u32Value);
6710 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6711
6712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6714 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6715 return IEMOP_RAISE_INVALID_OPCODE();
6716
6717 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6718 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
6719 IEM_MC_ADVANCE_RIP();
6720 IEM_MC_END();
6721 break;
6722
6723 case IEMMODE_64BIT:
6724 IEM_MC_BEGIN(0, 2);
6725 IEM_MC_LOCAL(uint64_t, u64Value);
6726 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6727
6728 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6730 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6731 return IEMOP_RAISE_INVALID_OPCODE();
6732
6733 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6734 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
6735 IEM_MC_ADVANCE_RIP();
6736 IEM_MC_END();
6737 break;
6738
6739 case IEMMODE_16BIT:
6740 /** @todo check this form. */
6741 return IEMOP_RAISE_INVALID_OPCODE();
6742 }
6743 }
6744 else
6745 return IEMOP_RAISE_INVALID_OPCODE();
6746 return VINF_SUCCESS;
6747}
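
/*
 * Note: MOVNTI is a non-temporal store; the cache hint has no effect an
 * interpreter needs to reproduce, so a plain store suffices.  Only the 32-bit
 * and 64-bit register-to-memory forms are valid, hence the #UD paths above.
 */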
6748
6749
6750/** Opcode 0x0f 0xc4. */
6751FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);
6752
6753/** Opcode 0x0f 0xc5. */
6754FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);
6755
6756/** Opcode 0x0f 0xc6. */
6757FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
6758
6759
6760/** Opcode 0x0f 0xc7 !11/1. */
6761FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
6762{
6763 IEMOP_MNEMONIC("cmpxchg8b Mq");
6764
6765 IEM_MC_BEGIN(4, 3);
6766 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
6767 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
6768 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
6769 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
6770 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
6771 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
6772 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6773
6774 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6775 IEMOP_HLP_DONE_DECODING();
6776 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6777
6778 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
6779 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
6780 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
6781
6782 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
6783 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
6784 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
6785
6786 IEM_MC_FETCH_EFLAGS(EFlags);
6787 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6788 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
6789 else
6790 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
6791
6792 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
6793 IEM_MC_COMMIT_EFLAGS(EFlags);
6794 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
6795 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
6796 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
6797 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
6798 IEM_MC_ENDIF();
6799 IEM_MC_ADVANCE_RIP();
6800
6801 IEM_MC_END();
6802 return VINF_SUCCESS;
6803}
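
/*
 * Note: CMPXCHG8B compares EDX:EAX with the 64-bit memory operand; on a match
 * ZF is set and ECX:EBX is stored, otherwise the memory value is loaded into
 * EDX:EAX.  The register pairs are assembled into RTUINT64U locals above so
 * the assembly worker can treat them as plain 64-bit values.
 */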
6804
6805
6806/** Opcode REX.W 0x0f 0xc7 !11/1. */
6807FNIEMOP_UD_STUB_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm);
6808
6809/** Opcode 0x0f 0xc7 11/6. */
6810FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
6811
6812/** Opcode 0x0f 0xc7 !11/6. */
6813FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
6814
6815/** Opcode 0x66 0x0f 0xc7 !11/6. */
6816FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
6817
6818/** Opcode 0xf3 0x0f 0xc7 !11/6. */
6819FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
6820
6821/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
6822FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6823
6824
6825/** Opcode 0x0f 0xc7. */
6826FNIEMOP_DEF(iemOp_Grp9)
6827{
6828 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6829 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6830 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6831 {
6832 case 0: case 2: case 3: case 4: case 5:
6833 return IEMOP_RAISE_INVALID_OPCODE();
6834 case 1:
6835 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6836 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6837 || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6838 return IEMOP_RAISE_INVALID_OPCODE();
 6839 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) /* REX.W lives in fPrefixes, not in the ModRM byte. */
6840 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6841 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6842 case 6:
6843 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6844 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6845 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6846 {
6847 case 0:
6848 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6849 case IEM_OP_PRF_SIZE_OP:
6850 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6851 case IEM_OP_PRF_REPZ:
6852 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6853 default:
6854 return IEMOP_RAISE_INVALID_OPCODE();
6855 }
6856 case 7:
6857 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6858 {
6859 case 0:
6860 case IEM_OP_PRF_REPZ:
6861 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6862 default:
6863 return IEMOP_RAISE_INVALID_OPCODE();
6864 }
6865 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6866 }
6867}
6868
6869
6870/**
6871 * Common 'bswap register' helper.
6872 */
6873FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
6874{
6875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6876 switch (pVCpu->iem.s.enmEffOpSize)
6877 {
6878 case IEMMODE_16BIT:
6879 IEM_MC_BEGIN(1, 0);
6880 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6881 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
6882 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
6883 IEM_MC_ADVANCE_RIP();
6884 IEM_MC_END();
6885 return VINF_SUCCESS;
6886
6887 case IEMMODE_32BIT:
6888 IEM_MC_BEGIN(1, 0);
6889 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6890 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
6891 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6892 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
6893 IEM_MC_ADVANCE_RIP();
6894 IEM_MC_END();
6895 return VINF_SUCCESS;
6896
6897 case IEMMODE_64BIT:
6898 IEM_MC_BEGIN(1, 0);
6899 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6900 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
6901 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
6902 IEM_MC_ADVANCE_RIP();
6903 IEM_MC_END();
6904 return VINF_SUCCESS;
6905
6906 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6907 }
6908}
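
/*
 * Note: BSWAP with a 16-bit operand is documented as producing an undefined
 * result on real CPUs.  Rather than raising #UD, the 16-bit case above is
 * routed to a dedicated worker, which is one permissible interpretation of
 * "undefined".
 */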
6909
6910
6911/** Opcode 0x0f 0xc8. */
6912FNIEMOP_DEF(iemOp_bswap_rAX_r8)
6913{
6914 IEMOP_MNEMONIC("bswap rAX/r8");
6915 /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
6916 prefix. REX.B is the correct prefix it appears. For a parallel
6917 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
6918 IEMOP_HLP_MIN_486();
6919 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
6920}
6921
6922
6923/** Opcode 0x0f 0xc9. */
6924FNIEMOP_DEF(iemOp_bswap_rCX_r9)
6925{
6926 IEMOP_MNEMONIC("bswap rCX/r9");
6927 IEMOP_HLP_MIN_486();
6928 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
6929}
6930
6931
6932/** Opcode 0x0f 0xca. */
6933FNIEMOP_DEF(iemOp_bswap_rDX_r10)
6934{
6935 IEMOP_MNEMONIC("bswap rDX/r9");
6936 IEMOP_HLP_MIN_486();
6937 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
6938}
6939
6940
6941/** Opcode 0x0f 0xcb. */
6942FNIEMOP_DEF(iemOp_bswap_rBX_r11)
6943{
6944 IEMOP_MNEMONIC("bswap rBX/r9");
6945 IEMOP_HLP_MIN_486();
6946 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
6947}
6948
6949
6950/** Opcode 0x0f 0xcc. */
6951FNIEMOP_DEF(iemOp_bswap_rSP_r12)
6952{
6953 IEMOP_MNEMONIC("bswap rSP/r12");
6954 IEMOP_HLP_MIN_486();
6955 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
6956}
6957
6958
6959/** Opcode 0x0f 0xcd. */
6960FNIEMOP_DEF(iemOp_bswap_rBP_r13)
6961{
6962 IEMOP_MNEMONIC("bswap rBP/r13");
6963 IEMOP_HLP_MIN_486();
6964 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
6965}
6966
6967
6968/** Opcode 0x0f 0xce. */
6969FNIEMOP_DEF(iemOp_bswap_rSI_r14)
6970{
6971 IEMOP_MNEMONIC("bswap rSI/r14");
6972 IEMOP_HLP_MIN_486();
6973 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
6974}
6975
6976
6977/** Opcode 0x0f 0xcf. */
6978FNIEMOP_DEF(iemOp_bswap_rDI_r15)
6979{
6980 IEMOP_MNEMONIC("bswap rDI/r15");
6981 IEMOP_HLP_MIN_486();
6982 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
6983}
6984
6985
6986
6987/** Opcode 0x0f 0xd0. */
6988FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
6989/** Opcode 0x0f 0xd1. */
6990 FNIEMOP_STUB(iemOp_psrlw_Pq_Qq__psrlw_Vdq_Wdq);
6991/** Opcode 0x0f 0xd2. */
6992FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
6993/** Opcode 0x0f 0xd3. */
6994FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
6995/** Opcode 0x0f 0xd4. */
6996FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
6997/** Opcode 0x0f 0xd5. */
6998 FNIEMOP_STUB(iemOp_pmullw_Pq_Qq__pmullw_Vdq_Wdq);
6999/** Opcode 0x0f 0xd6. */
7000FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq);
7001
7002
7003/** Opcode 0x0f 0xd7. */
7004FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
7005{
7006 /* Docs say register only. */
7007 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7008 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7009 return IEMOP_RAISE_INVALID_OPCODE();
7010
7011 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7012 /** @todo testcase: Check that the instruction implicitly clears the high
7013 * bits in 64-bit mode. REX.W only becomes necessary once VLMAX > 256
7014 * and the opcode is modified to work with the whole width (not
7015 * just 128 bits). */
7016 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7017 {
7018 case IEM_OP_PRF_SIZE_OP: /* SSE */
7019 IEMOP_MNEMONIC("pmovmskb Gd,Nq");
7020 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7021 IEM_MC_BEGIN(2, 0);
7022 IEM_MC_ARG(uint64_t *, pDst, 0);
7023 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7024 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7025 IEM_MC_PREPARE_SSE_USAGE();
7026 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7027 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7028 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7029 IEM_MC_ADVANCE_RIP();
7030 IEM_MC_END();
7031 return VINF_SUCCESS;
7032
7033 case 0: /* MMX */
7034 IEMOP_MNEMONIC("pmovmskb Gd,Udq");
7035 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7036 IEM_MC_BEGIN(2, 0);
7037 IEM_MC_ARG(uint64_t *, pDst, 0);
7038 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7039 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7040 IEM_MC_PREPARE_FPU_USAGE();
7041 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7042 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7043 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7044 IEM_MC_ADVANCE_RIP();
7045 IEM_MC_END();
7046 return VINF_SUCCESS;
7047
7048 default:
7049 return IEMOP_RAISE_INVALID_OPCODE();
7050 }
7051}
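
/* Note! A minimal C sketch of what both pmovmskb workers above are assumed
   to do (per the SDM: result bit i is the most significant bit of source
   byte i, all higher result bits zero), shown for the 64-bit MMX form:

       static void pmovmskbU64Sketch(uint64_t *pDst, uint64_t const *pSrc)
       {
           uint64_t fMask = 0;
           for (unsigned iByte = 0; iByte < 8; iByte++)
               fMask |= ((*pSrc >> (iByte * 8 + 7)) & 1) << iByte;
           *pDst = fMask;   /* e.g. 0x8000800000000080 -> 0xa1 */
       }

   The 128-bit SSE form does the same over 16 bytes, yielding a 16-bit
   mask in the low bits of the destination GREG. */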
7052
7053
7054/** Opcode 0x0f 0xd8. */
7055FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
7056/** Opcode 0x0f 0xd9. */
7057FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
7058/** Opcode 0x0f 0xda. */
7059FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
7060/** Opcode 0x0f 0xdb. */
7061FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
7062/** Opcode 0x0f 0xdc. */
7063FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
7064/** Opcode 0x0f 0xdd. */
7065FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
7066/** Opcode 0x0f 0xde. */
7067 FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pmaxub_Vdq_Wdq);
7068/** Opcode 0x0f 0xdf. */
7069FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
7070/** Opcode 0x0f 0xe0. */
7071FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
7072/** Opcode 0x0f 0xe1. */
7073FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
7074/** Opcode 0x0f 0xe2. */
7075FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
7076/** Opcode 0x0f 0xe3. */
7077FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
7078/** Opcode 0x0f 0xe4. */
7079FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
7080/** Opcode 0x0f 0xe5. */
7081FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
7082/** Opcode 0x0f 0xe6. */
7083 FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wpd__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
7084
7085
7086/** Opcode 0x0f 0xe7. */
7087FNIEMOP_DEF(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq)
7088{
7089 IEMOP_MNEMONIC(!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movntq mr,r" : "movntdq mr,r");
7090 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7091 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7092 {
7093 /*
7094 * Register, memory.
7095 */
7096/** @todo check when the REPNZ/Z bits kick in. Same as lock, probably... */
7097 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7098 {
7099
7100 case IEM_OP_PRF_SIZE_OP: /* SSE */
7101 IEM_MC_BEGIN(0, 2);
7102 IEM_MC_LOCAL(uint128_t, uSrc);
7103 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7104
7105 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7107 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7108 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7109
7110 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7111 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7112
7113 IEM_MC_ADVANCE_RIP();
7114 IEM_MC_END();
7115 break;
7116
7117 case 0: /* MMX */
7118 IEM_MC_BEGIN(0, 2);
7119 IEM_MC_LOCAL(uint64_t, uSrc);
7120 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7121
7122 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7124 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7125 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7126
7127 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7128 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7129
7130 IEM_MC_ADVANCE_RIP();
7131 IEM_MC_END();
7132 break;
7133
7134 default:
7135 return IEMOP_RAISE_INVALID_OPCODE();
7136 }
7137 }
7138 /* The register, register encoding is invalid. */
7139 else
7140 return IEMOP_RAISE_INVALID_OPCODE();
7141 return VINF_SUCCESS;
7142}
7143
7144
7145/** Opcode 0x0f 0xe8. */
7146FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
7147/** Opcode 0x0f 0xe9. */
7148FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
7149/** Opcode 0x0f 0xea. */
7150FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
7151/** Opcode 0x0f 0xeb. */
7152FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
7153/** Opcode 0x0f 0xec. */
7154FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
7155/** Opcode 0x0f 0xed. */
7156FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
7157/** Opcode 0x0f 0xee. */
7158FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
7159
7160
7161/** Opcode 0x0f 0xef. */
7162FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
7163{
7164 IEMOP_MNEMONIC("pxor");
7165 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
7166}
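
/* Note! 'pxor reg,reg' with identical operands is the standard idiom for
   zeroing an MMX/XMM register, since x ^ x == 0 regardless of the previous
   register contents. */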
7167
7168
7169/** Opcode 0x0f 0xf0. */
7170FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
7171/** Opcode 0x0f 0xf1. */
7172 FNIEMOP_STUB(iemOp_psllw_Pq_Qq__psllw_Vdq_Wdq);
7173/** Opcode 0x0f 0xf2. */
7174 FNIEMOP_STUB(iemOp_pslld_Pq_Qq__pslld_Vdq_Wdq);
7175/** Opcode 0x0f 0xf3. */
7176 FNIEMOP_STUB(iemOp_psllq_Pq_Qq__psllq_Vdq_Wdq);
7177/** Opcode 0x0f 0xf4. */
7178FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
7179/** Opcode 0x0f 0xf5. */
7180FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
7181/** Opcode 0x0f 0xf6. */
7182FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
7183/** Opcode 0x0f 0xf7. */
7184FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
7185/** Opcode 0x0f 0xf8. */
7186FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq); //NEXT
7187/** Opcode 0x0f 0xf9. */
7188FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
7189/** Opcode 0x0f 0xfa. */
7190FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
7191/** Opcode 0x0f 0xfb. */
7192 FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psubq_Vdq_Wdq);
7193/** Opcode 0x0f 0xfc. */
7194FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
7195/** Opcode 0x0f 0xfd. */
7196FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
7197/** Opcode 0x0f 0xfe. */
7198FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
7199
7200
7201IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[256] =
7202{
7203 /* 0x00 */ iemOp_Grp6,
7204 /* 0x01 */ iemOp_Grp7,
7205 /* 0x02 */ iemOp_lar_Gv_Ew,
7206 /* 0x03 */ iemOp_lsl_Gv_Ew,
7207 /* 0x04 */ iemOp_Invalid,
7208 /* 0x05 */ iemOp_syscall,
7209 /* 0x06 */ iemOp_clts,
7210 /* 0x07 */ iemOp_sysret,
7211 /* 0x08 */ iemOp_invd,
7212 /* 0x09 */ iemOp_wbinvd,
7213 /* 0x0a */ iemOp_Invalid,
7214 /* 0x0b */ iemOp_ud2,
7215 /* 0x0c */ iemOp_Invalid,
7216 /* 0x0d */ iemOp_nop_Ev_GrpP,
7217 /* 0x0e */ iemOp_femms,
7218 /* 0x0f */ iemOp_3Dnow,
7219 /* 0x10 */ iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
7220 /* 0x11 */ iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
7221 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
7222 /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
7223 /* 0x14 */ iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
7224 /* 0x15 */ iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
7225 /* 0x16 */ iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
7226 /* 0x17 */ iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
7227 /* 0x18 */ iemOp_prefetch_Grp16,
7228 /* 0x19 */ iemOp_nop_Ev,
7229 /* 0x1a */ iemOp_nop_Ev,
7230 /* 0x1b */ iemOp_nop_Ev,
7231 /* 0x1c */ iemOp_nop_Ev,
7232 /* 0x1d */ iemOp_nop_Ev,
7233 /* 0x1e */ iemOp_nop_Ev,
7234 /* 0x1f */ iemOp_nop_Ev,
7235 /* 0x20 */ iemOp_mov_Rd_Cd,
7236 /* 0x21 */ iemOp_mov_Rd_Dd,
7237 /* 0x22 */ iemOp_mov_Cd_Rd,
7238 /* 0x23 */ iemOp_mov_Dd_Rd,
7239 /* 0x24 */ iemOp_mov_Rd_Td,
7240 /* 0x25 */ iemOp_Invalid,
7241 /* 0x26 */ iemOp_mov_Td_Rd,
7242 /* 0x27 */ iemOp_Invalid,
7243 /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
7244 /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
7245 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
7246 /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
7247 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
7248 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
7249 /* 0x2e */ iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
7250 /* 0x2f */ iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
7251 /* 0x30 */ iemOp_wrmsr,
7252 /* 0x31 */ iemOp_rdtsc,
7253 /* 0x32 */ iemOp_rdmsr,
7254 /* 0x33 */ iemOp_rdpmc,
7255 /* 0x34 */ iemOp_sysenter,
7256 /* 0x35 */ iemOp_sysexit,
7257 /* 0x36 */ iemOp_Invalid,
7258 /* 0x37 */ iemOp_getsec,
7259 /* 0x38 */ iemOp_3byte_Esc_A4,
7260 /* 0x39 */ iemOp_Invalid,
7261 /* 0x3a */ iemOp_3byte_Esc_A5,
7262 /* 0x3b */ iemOp_Invalid,
7263 /* 0x3c */ iemOp_Invalid,
7264 /* 0x3d */ iemOp_Invalid,
7265 /* 0x3e */ iemOp_Invalid,
7266 /* 0x3f */ iemOp_Invalid,
7267 /* 0x40 */ iemOp_cmovo_Gv_Ev,
7268 /* 0x41 */ iemOp_cmovno_Gv_Ev,
7269 /* 0x42 */ iemOp_cmovc_Gv_Ev,
7270 /* 0x43 */ iemOp_cmovnc_Gv_Ev,
7271 /* 0x44 */ iemOp_cmove_Gv_Ev,
7272 /* 0x45 */ iemOp_cmovne_Gv_Ev,
7273 /* 0x46 */ iemOp_cmovbe_Gv_Ev,
7274 /* 0x47 */ iemOp_cmovnbe_Gv_Ev,
7275 /* 0x48 */ iemOp_cmovs_Gv_Ev,
7276 /* 0x49 */ iemOp_cmovns_Gv_Ev,
7277 /* 0x4a */ iemOp_cmovp_Gv_Ev,
7278 /* 0x4b */ iemOp_cmovnp_Gv_Ev,
7279 /* 0x4c */ iemOp_cmovl_Gv_Ev,
7280 /* 0x4d */ iemOp_cmovnl_Gv_Ev,
7281 /* 0x4e */ iemOp_cmovle_Gv_Ev,
7282 /* 0x4f */ iemOp_cmovnle_Gv_Ev,
7283 /* 0x50 */ iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
7284 /* 0x51 */ iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
7285 /* 0x52 */ iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
7286 /* 0x53 */ iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
7287 /* 0x54 */ iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
7288 /* 0x55 */ iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
7289 /* 0x56 */ iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
7290 /* 0x57 */ iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
7291 /* 0x58 */ iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
7292 /* 0x59 */ iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
7293 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
7294 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
7295 /* 0x5c */ iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
7296 /* 0x5d */ iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
7297 /* 0x5e */ iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
7298 /* 0x5f */ iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
7299 /* 0x60 */ iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
7300 /* 0x61 */ iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
7301 /* 0x62 */ iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
7302 /* 0x63 */ iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
7303 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
7304 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
7305 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
7306 /* 0x67 */ iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
7307 /* 0x68 */ iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
7308 /* 0x69 */ iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
7309 /* 0x6a */ iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
7310 /* 0x6b */ iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
7311 /* 0x6c */ iemOp_punpcklqdq_Vdq_Wdq,
7312 /* 0x6d */ iemOp_punpckhqdq_Vdq_Wdq,
7313 /* 0x6e */ iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
7314 /* 0x6f */ iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
7315 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
7316 /* 0x71 */ iemOp_Grp12,
7317 /* 0x72 */ iemOp_Grp13,
7318 /* 0x73 */ iemOp_Grp14,
7319 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
7320 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
7321 /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
7322 /* 0x77 */ iemOp_emms,
7323 /* 0x78 */ iemOp_vmread_AmdGrp17,
7324 /* 0x79 */ iemOp_vmwrite,
7325 /* 0x7a */ iemOp_Invalid,
7326 /* 0x7b */ iemOp_Invalid,
7327 /* 0x7c */ iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
7328 /* 0x7d */ iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
7329 /* 0x7e */ iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
7330 /* 0x7f */ iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
7331 /* 0x80 */ iemOp_jo_Jv,
7332 /* 0x81 */ iemOp_jno_Jv,
7333 /* 0x82 */ iemOp_jc_Jv,
7334 /* 0x83 */ iemOp_jnc_Jv,
7335 /* 0x84 */ iemOp_je_Jv,
7336 /* 0x85 */ iemOp_jne_Jv,
7337 /* 0x86 */ iemOp_jbe_Jv,
7338 /* 0x87 */ iemOp_jnbe_Jv,
7339 /* 0x88 */ iemOp_js_Jv,
7340 /* 0x89 */ iemOp_jns_Jv,
7341 /* 0x8a */ iemOp_jp_Jv,
7342 /* 0x8b */ iemOp_jnp_Jv,
7343 /* 0x8c */ iemOp_jl_Jv,
7344 /* 0x8d */ iemOp_jnl_Jv,
7345 /* 0x8e */ iemOp_jle_Jv,
7346 /* 0x8f */ iemOp_jnle_Jv,
7347 /* 0x90 */ iemOp_seto_Eb,
7348 /* 0x91 */ iemOp_setno_Eb,
7349 /* 0x92 */ iemOp_setc_Eb,
7350 /* 0x93 */ iemOp_setnc_Eb,
7351 /* 0x94 */ iemOp_sete_Eb,
7352 /* 0x95 */ iemOp_setne_Eb,
7353 /* 0x96 */ iemOp_setbe_Eb,
7354 /* 0x97 */ iemOp_setnbe_Eb,
7355 /* 0x98 */ iemOp_sets_Eb,
7356 /* 0x99 */ iemOp_setns_Eb,
7357 /* 0x9a */ iemOp_setp_Eb,
7358 /* 0x9b */ iemOp_setnp_Eb,
7359 /* 0x9c */ iemOp_setl_Eb,
7360 /* 0x9d */ iemOp_setnl_Eb,
7361 /* 0x9e */ iemOp_setle_Eb,
7362 /* 0x9f */ iemOp_setnle_Eb,
7363 /* 0xa0 */ iemOp_push_fs,
7364 /* 0xa1 */ iemOp_pop_fs,
7365 /* 0xa2 */ iemOp_cpuid,
7366 /* 0xa3 */ iemOp_bt_Ev_Gv,
7367 /* 0xa4 */ iemOp_shld_Ev_Gv_Ib,
7368 /* 0xa5 */ iemOp_shld_Ev_Gv_CL,
7369 /* 0xa6 */ iemOp_Invalid,
7370 /* 0xa7 */ iemOp_Invalid,
7371 /* 0xa8 */ iemOp_push_gs,
7372 /* 0xa9 */ iemOp_pop_gs,
7373 /* 0xaa */ iemOp_rsm,
7374 /* 0xab */ iemOp_bts_Ev_Gv,
7375 /* 0xac */ iemOp_shrd_Ev_Gv_Ib,
7376 /* 0xad */ iemOp_shrd_Ev_Gv_CL,
7377 /* 0xae */ iemOp_Grp15,
7378 /* 0xaf */ iemOp_imul_Gv_Ev,
7379 /* 0xb0 */ iemOp_cmpxchg_Eb_Gb,
7380 /* 0xb1 */ iemOp_cmpxchg_Ev_Gv,
7381 /* 0xb2 */ iemOp_lss_Gv_Mp,
7382 /* 0xb3 */ iemOp_btr_Ev_Gv,
7383 /* 0xb4 */ iemOp_lfs_Gv_Mp,
7384 /* 0xb5 */ iemOp_lgs_Gv_Mp,
7385 /* 0xb6 */ iemOp_movzx_Gv_Eb,
7386 /* 0xb7 */ iemOp_movzx_Gv_Ew,
7387 /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,
7388 /* 0xb9 */ iemOp_Grp10,
7389 /* 0xba */ iemOp_Grp8,
7390 /* 0xbb */ iemOp_btc_Ev_Gv,
7391 /* 0xbc */ iemOp_bsf_Gv_Ev,
7392 /* 0xbd */ iemOp_bsr_Gv_Ev,
7393 /* 0xbe */ iemOp_movsx_Gv_Eb,
7394 /* 0xbf */ iemOp_movsx_Gv_Ew,
7395 /* 0xc0 */ iemOp_xadd_Eb_Gb,
7396 /* 0xc1 */ iemOp_xadd_Ev_Gv,
7397 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
7398 /* 0xc3 */ iemOp_movnti_My_Gy,
7399 /* 0xc4 */ iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
7400 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
7401 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
7402 /* 0xc7 */ iemOp_Grp9,
7403 /* 0xc8 */ iemOp_bswap_rAX_r8,
7404 /* 0xc9 */ iemOp_bswap_rCX_r9,
7405 /* 0xca */ iemOp_bswap_rDX_r10,
7406 /* 0xcb */ iemOp_bswap_rBX_r11,
7407 /* 0xcc */ iemOp_bswap_rSP_r12,
7408 /* 0xcd */ iemOp_bswap_rBP_r13,
7409 /* 0xce */ iemOp_bswap_rSI_r14,
7410 /* 0xcf */ iemOp_bswap_rDI_r15,
7411 /* 0xd0 */ iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
7412 /* 0xd1 */ iemOp_psrlw_Pq_Qq__psrlw_Vdq_Wdq,
7413 /* 0xd2 */ iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
7414 /* 0xd3 */ iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
7415 /* 0xd4 */ iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
7416 /* 0xd5 */ iemOp_pmullw_Pq_Qq__pmullw_Vdq_Wdq,
7417 /* 0xd6 */ iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
7418 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
7419 /* 0xd8 */ iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
7420 /* 0xd9 */ iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
7421 /* 0xda */ iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
7422 /* 0xdb */ iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
7423 /* 0xdc */ iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
7424 /* 0xdd */ iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
7425 /* 0xde */ iemOp_pmaxub_Pq_Qq__pmaxub_Vdq_Wdq,
7426 /* 0xdf */ iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
7427 /* 0xe0 */ iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
7428 /* 0xe1 */ iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
7429 /* 0xe2 */ iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
7430 /* 0xe3 */ iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
7431 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
7432 /* 0xe5 */ iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
7433 /* 0xe6 */ iemOp_cvttpd2dq_Vdq_Wpd__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
7434 /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
7435 /* 0xe8 */ iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
7436 /* 0xe9 */ iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
7437 /* 0xea */ iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
7438 /* 0xeb */ iemOp_por_Pq_Qq__por_Vdq_Wdq,
7439 /* 0xec */ iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
7440 /* 0xed */ iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
7441 /* 0xee */ iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
7442 /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
7443 /* 0xf0 */ iemOp_lddqu_Vdq_Mdq,
7444 /* 0xf1 */ iemOp_psllw_Pq_Qq__psllw_Vdq_Wdq,
7445 /* 0xf2 */ iemOp_pslld_Pq_Qq__pslld_Vdq_Wdq,
7446 /* 0xf3 */ iemOp_psllq_Pq_Qq__psllq_Vdq_Wdq,
7447 /* 0xf4 */ iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
7448 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
7449 /* 0xf6 */ iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
7450 /* 0xf7 */ iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
7451 /* 0xf8 */ iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
7452 /* 0xf9 */ iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
7453 /* 0xfa */ iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
7454 /* 0xfb */ iemOp_psubq_Pq_Qq__psubq_Vdq_Wdq,
7455 /* 0xfc */ iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
7456 /* 0xfd */ iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
7457 /* 0xfe */ iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
7458 /* 0xff */ iemOp_Invalid
7459};
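
/* Note! Many two-byte opcodes change meaning under the 66h/F2h/F3h mandatory
   prefixes. Rather than keeping one table per prefix, each entry above
   inspects pVCpu->iem.s.fPrefixes itself (see e.g.
   iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq), which is why the names are
   double-underscore concatenations of all the forms an entry implements. */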
7460
7461/** @} */
7462
7463
7464/** @name One byte opcodes.
7465 *
7466 * @{
7467 */
7468
7469/** Opcode 0x00. */
7470FNIEMOP_DEF(iemOp_add_Eb_Gb)
7471{
7472 IEMOP_MNEMONIC("add Eb,Gb");
7473 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
7474}
7475
7476
7477/** Opcode 0x01. */
7478FNIEMOP_DEF(iemOp_add_Ev_Gv)
7479{
7480 IEMOP_MNEMONIC("add Ev,Gv");
7481 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
7482}
7483
7484
7485/** Opcode 0x02. */
7486FNIEMOP_DEF(iemOp_add_Gb_Eb)
7487{
7488 IEMOP_MNEMONIC("add Gb,Eb");
7489 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
7490}
7491
7492
7493/** Opcode 0x03. */
7494FNIEMOP_DEF(iemOp_add_Gv_Ev)
7495{
7496 IEMOP_MNEMONIC("add Gv,Ev");
7497 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
7498}
7499
7500
7501/** Opcode 0x04. */
7502FNIEMOP_DEF(iemOp_add_Al_Ib)
7503{
7504 IEMOP_MNEMONIC("add al,Ib");
7505 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
7506}
7507
7508
7509/** Opcode 0x05. */
7510FNIEMOP_DEF(iemOp_add_eAX_Iz)
7511{
7512 IEMOP_MNEMONIC("add rAX,Iz");
7513 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
7514}
7515
7516
7517/** Opcode 0x06. */
7518FNIEMOP_DEF(iemOp_push_ES)
7519{
7520 IEMOP_MNEMONIC("push es");
7521 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
7522}
7523
7524
7525/** Opcode 0x07. */
7526FNIEMOP_DEF(iemOp_pop_ES)
7527{
7528 IEMOP_MNEMONIC("pop es");
7529 IEMOP_HLP_NO_64BIT();
7530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7531 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
7532}
7533
7534
7535/** Opcode 0x08. */
7536FNIEMOP_DEF(iemOp_or_Eb_Gb)
7537{
7538 IEMOP_MNEMONIC("or Eb,Gb");
7539 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7540 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
7541}
7542
7543
7544/** Opcode 0x09. */
7545FNIEMOP_DEF(iemOp_or_Ev_Gv)
7546{
7547 IEMOP_MNEMONIC("or Ev,Gv ");
7548 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7549 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
7550}
7551
7552
7553/** Opcode 0x0a. */
7554FNIEMOP_DEF(iemOp_or_Gb_Eb)
7555{
7556 IEMOP_MNEMONIC("or Gb,Eb");
7557 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7558 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
7559}
7560
7561
7562/** Opcode 0x0b. */
7563FNIEMOP_DEF(iemOp_or_Gv_Ev)
7564{
7565 IEMOP_MNEMONIC("or Gv,Ev");
7566 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7567 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
7568}
7569
7570
7571/** Opcode 0x0c. */
7572FNIEMOP_DEF(iemOp_or_Al_Ib)
7573{
7574 IEMOP_MNEMONIC("or al,Ib");
7575 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7576 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
7577}
7578
7579
7580/** Opcode 0x0d. */
7581FNIEMOP_DEF(iemOp_or_eAX_Iz)
7582{
7583 IEMOP_MNEMONIC("or rAX,Iz");
7584 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7585 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
7586}
7587
7588
7589/** Opcode 0x0e. */
7590FNIEMOP_DEF(iemOp_push_CS)
7591{
7592 IEMOP_MNEMONIC("push cs");
7593 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
7594}
7595
7596
7597/** Opcode 0x0f. */
7598FNIEMOP_DEF(iemOp_2byteEscape)
7599{
7600 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7601 /** @todo PUSH CS on 8086, undefined on 80186. */
7602 IEMOP_HLP_MIN_286();
7603 return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
7604}
7605
7606/** Opcode 0x10. */
7607FNIEMOP_DEF(iemOp_adc_Eb_Gb)
7608{
7609 IEMOP_MNEMONIC("adc Eb,Gb");
7610 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
7611}
7612
7613
7614/** Opcode 0x11. */
7615FNIEMOP_DEF(iemOp_adc_Ev_Gv)
7616{
7617 IEMOP_MNEMONIC("adc Ev,Gv");
7618 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
7619}
7620
7621
7622/** Opcode 0x12. */
7623FNIEMOP_DEF(iemOp_adc_Gb_Eb)
7624{
7625 IEMOP_MNEMONIC("adc Gb,Eb");
7626 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
7627}
7628
7629
7630/** Opcode 0x13. */
7631FNIEMOP_DEF(iemOp_adc_Gv_Ev)
7632{
7633 IEMOP_MNEMONIC("adc Gv,Ev");
7634 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
7635}
7636
7637
7638/** Opcode 0x14. */
7639FNIEMOP_DEF(iemOp_adc_Al_Ib)
7640{
7641 IEMOP_MNEMONIC("adc al,Ib");
7642 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
7643}
7644
7645
7646/** Opcode 0x15. */
7647FNIEMOP_DEF(iemOp_adc_eAX_Iz)
7648{
7649 IEMOP_MNEMONIC("adc rAX,Iz");
7650 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
7651}
7652
7653
7654/** Opcode 0x16. */
7655FNIEMOP_DEF(iemOp_push_SS)
7656{
7657 IEMOP_MNEMONIC("push ss");
7658 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
7659}
7660
7661
7662/** Opcode 0x17. */
7663FNIEMOP_DEF(iemOp_pop_SS)
7664{
7665 IEMOP_MNEMONIC("pop ss"); /** @todo implies instruction fusing? */
7666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7667 IEMOP_HLP_NO_64BIT();
7668 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
7669}
7670
7671
7672/** Opcode 0x18. */
7673FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
7674{
7675 IEMOP_MNEMONIC("sbb Eb,Gb");
7676 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
7677}
7678
7679
7680/** Opcode 0x19. */
7681FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
7682{
7683 IEMOP_MNEMONIC("sbb Ev,Gv");
7684 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
7685}
7686
7687
7688/** Opcode 0x1a. */
7689FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
7690{
7691 IEMOP_MNEMONIC("sbb Gb,Eb");
7692 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
7693}
7694
7695
7696/** Opcode 0x1b. */
7697FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
7698{
7699 IEMOP_MNEMONIC("sbb Gv,Ev");
7700 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
7701}
7702
7703
7704/** Opcode 0x1c. */
7705FNIEMOP_DEF(iemOp_sbb_Al_Ib)
7706{
7707 IEMOP_MNEMONIC("sbb al,Ib");
7708 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
7709}
7710
7711
7712/** Opcode 0x1d. */
7713FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
7714{
7715 IEMOP_MNEMONIC("sbb rAX,Iz");
7716 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
7717}
7718
7719
7720/** Opcode 0x1e. */
7721FNIEMOP_DEF(iemOp_push_DS)
7722{
7723 IEMOP_MNEMONIC("push ds");
7724 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
7725}
7726
7727
7728/** Opcode 0x1f. */
7729FNIEMOP_DEF(iemOp_pop_DS)
7730{
7731 IEMOP_MNEMONIC("pop ds");
7732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7733 IEMOP_HLP_NO_64BIT();
7734 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
7735}
7736
7737
7738/** Opcode 0x20. */
7739FNIEMOP_DEF(iemOp_and_Eb_Gb)
7740{
7741 IEMOP_MNEMONIC("and Eb,Gb");
7742 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7743 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
7744}
7745
7746
7747/** Opcode 0x21. */
7748FNIEMOP_DEF(iemOp_and_Ev_Gv)
7749{
7750 IEMOP_MNEMONIC("and Ev,Gv");
7751 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7752 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
7753}
7754
7755
7756/** Opcode 0x22. */
7757FNIEMOP_DEF(iemOp_and_Gb_Eb)
7758{
7759 IEMOP_MNEMONIC("and Gb,Eb");
7760 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7761 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
7762}
7763
7764
7765/** Opcode 0x23. */
7766FNIEMOP_DEF(iemOp_and_Gv_Ev)
7767{
7768 IEMOP_MNEMONIC("and Gv,Ev");
7769 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7770 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
7771}
7772
7773
7774/** Opcode 0x24. */
7775FNIEMOP_DEF(iemOp_and_Al_Ib)
7776{
7777 IEMOP_MNEMONIC("and al,Ib");
7778 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7779 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
7780}
7781
7782
7783/** Opcode 0x25. */
7784FNIEMOP_DEF(iemOp_and_eAX_Iz)
7785{
7786 IEMOP_MNEMONIC("and rAX,Iz");
7787 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7788 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
7789}
7790
7791
7792/** Opcode 0x26. */
7793FNIEMOP_DEF(iemOp_seg_ES)
7794{
7795 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
7796 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
7797 pVCpu->iem.s.iEffSeg = X86_SREG_ES;
7798
7799 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7800 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7801}
7802
7803
7804/** Opcode 0x27. */
7805FNIEMOP_DEF(iemOp_daa)
7806{
7807 IEMOP_MNEMONIC("daa AL");
7808 IEMOP_HLP_NO_64BIT();
7809 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7810 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
7811 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
7812}
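
/* Note! iemCImpl_daa lives in IEMAllCImpl.cpp.h; for reference, it is
   assumed to follow the textbook adjustment (flag details omitted):

       if ((uAl & 0xf) > 9 || fAF)
           uAl += 6;              /* e.g. 0x09 + 0x12 = 0x1b -> 0x21 */
       if (uAlOld > 0x99 || fCF)
           uAl += 0x60;

   i.e. after adding two packed BCD operands, DAA turns the binary sum in AL
   back into a valid packed BCD result. */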
7813
7814
7815/** Opcode 0x28. */
7816FNIEMOP_DEF(iemOp_sub_Eb_Gb)
7817{
7818 IEMOP_MNEMONIC("sub Eb,Gb");
7819 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
7820}
7821
7822
7823/** Opcode 0x29. */
7824FNIEMOP_DEF(iemOp_sub_Ev_Gv)
7825{
7826 IEMOP_MNEMONIC("sub Ev,Gv");
7827 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
7828}
7829
7830
7831/** Opcode 0x2a. */
7832FNIEMOP_DEF(iemOp_sub_Gb_Eb)
7833{
7834 IEMOP_MNEMONIC("sub Gb,Eb");
7835 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
7836}
7837
7838
7839/** Opcode 0x2b. */
7840FNIEMOP_DEF(iemOp_sub_Gv_Ev)
7841{
7842 IEMOP_MNEMONIC("sub Gv,Ev");
7843 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
7844}
7845
7846
7847/** Opcode 0x2c. */
7848FNIEMOP_DEF(iemOp_sub_Al_Ib)
7849{
7850 IEMOP_MNEMONIC("sub al,Ib");
7851 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
7852}
7853
7854
7855/** Opcode 0x2d. */
7856FNIEMOP_DEF(iemOp_sub_eAX_Iz)
7857{
7858 IEMOP_MNEMONIC("sub rAX,Iz");
7859 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
7860}
7861
7862
7863/** Opcode 0x2e. */
7864FNIEMOP_DEF(iemOp_seg_CS)
7865{
7866 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
7867 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
7868 pVCpu->iem.s.iEffSeg = X86_SREG_CS;
7869
7870 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7871 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7872}
7873
7874
7875/** Opcode 0x2f. */
7876FNIEMOP_DEF(iemOp_das)
7877{
7878 IEMOP_MNEMONIC("das AL");
7879 IEMOP_HLP_NO_64BIT();
7880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7881 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
7882 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
7883}
7884
7885
7886/** Opcode 0x30. */
7887FNIEMOP_DEF(iemOp_xor_Eb_Gb)
7888{
7889 IEMOP_MNEMONIC("xor Eb,Gb");
7890 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7891 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
7892}
7893
7894
7895/** Opcode 0x31. */
7896FNIEMOP_DEF(iemOp_xor_Ev_Gv)
7897{
7898 IEMOP_MNEMONIC("xor Ev,Gv");
7899 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7900 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
7901}
7902
7903
7904/** Opcode 0x32. */
7905FNIEMOP_DEF(iemOp_xor_Gb_Eb)
7906{
7907 IEMOP_MNEMONIC("xor Gb,Eb");
7908 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7909 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
7910}
7911
7912
7913/** Opcode 0x33. */
7914FNIEMOP_DEF(iemOp_xor_Gv_Ev)
7915{
7916 IEMOP_MNEMONIC("xor Gv,Ev");
7917 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7918 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
7919}
7920
7921
7922/** Opcode 0x34. */
7923FNIEMOP_DEF(iemOp_xor_Al_Ib)
7924{
7925 IEMOP_MNEMONIC("xor al,Ib");
7926 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7927 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
7928}
7929
7930
7931/** Opcode 0x35. */
7932FNIEMOP_DEF(iemOp_xor_eAX_Iz)
7933{
7934 IEMOP_MNEMONIC("xor rAX,Iz");
7935 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7936 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
7937}
7938
7939
7940/** Opcode 0x36. */
7941FNIEMOP_DEF(iemOp_seg_SS)
7942{
7943 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
7944 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
7945 pVCpu->iem.s.iEffSeg = X86_SREG_SS;
7946
7947 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7948 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7949}
7950
7951
7952/** Opcode 0x37. */
7953FNIEMOP_STUB(iemOp_aaa);
7954
7955
7956/** Opcode 0x38. */
7957FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
7958{
7959 IEMOP_MNEMONIC("cmp Eb,Gb");
7960 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
7961}
7962
7963
7964/** Opcode 0x39. */
7965FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
7966{
7967 IEMOP_MNEMONIC("cmp Ev,Gv");
7968 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
7969}
7970
7971
7972/** Opcode 0x3a. */
7973FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
7974{
7975 IEMOP_MNEMONIC("cmp Gb,Eb");
7976 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
7977}
7978
7979
7980/** Opcode 0x3b. */
7981FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
7982{
7983 IEMOP_MNEMONIC("cmp Gv,Ev");
7984 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
7985}
7986
7987
7988/** Opcode 0x3c. */
7989FNIEMOP_DEF(iemOp_cmp_Al_Ib)
7990{
7991 IEMOP_MNEMONIC("cmp al,Ib");
7992 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
7993}
7994
7995
7996/** Opcode 0x3d. */
7997FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
7998{
7999 IEMOP_MNEMONIC("cmp rAX,Iz");
8000 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
8001}
8002
8003
8004/** Opcode 0x3e. */
8005FNIEMOP_DEF(iemOp_seg_DS)
8006{
8007 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
8008 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
8009 pVCpu->iem.s.iEffSeg = X86_SREG_DS;
8010
8011 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8012 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8013}
8014
8015
8016/** Opcode 0x3f. */
8017FNIEMOP_STUB(iemOp_aas);
8018
8019/**
8020 * Common 'inc/dec/not/neg register' helper.
8021 */
8022FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
8023{
8024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8025 switch (pVCpu->iem.s.enmEffOpSize)
8026 {
8027 case IEMMODE_16BIT:
8028 IEM_MC_BEGIN(2, 0);
8029 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8030 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8031 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
8032 IEM_MC_REF_EFLAGS(pEFlags);
8033 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
8034 IEM_MC_ADVANCE_RIP();
8035 IEM_MC_END();
8036 return VINF_SUCCESS;
8037
8038 case IEMMODE_32BIT:
8039 IEM_MC_BEGIN(2, 0);
8040 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8041 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8042 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8043 IEM_MC_REF_EFLAGS(pEFlags);
8044 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
8045 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8046 IEM_MC_ADVANCE_RIP();
8047 IEM_MC_END();
8048 return VINF_SUCCESS;
8049
8050 case IEMMODE_64BIT:
8051 IEM_MC_BEGIN(2, 0);
8052 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8053 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8054 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8055 IEM_MC_REF_EFLAGS(pEFlags);
8056 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
8057 IEM_MC_ADVANCE_RIP();
8058 IEM_MC_END();
8059 return VINF_SUCCESS;
8060 }
8061 return VINF_SUCCESS;
8062}
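
/* Note! The IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF in the 32-bit case above
   implements the usual AMD64 rule that a 32-bit GPR write zero-extends into
   the full 64-bit register, while 16-bit and 8-bit writes leave the upper
   bits untouched: with RAX=0xffffffffffffffff, 'inc eax' yields 0 whereas
   'inc ax' yields 0xffffffffffff0000. */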
8063
8064
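/* Note! In 64-bit mode the opcodes 0x40-0x4f are REX prefixes, encoded as
   0100WRXB: W selects the 64-bit operand size, while R, X and B supply
   bit 3 of the ModRM reg field, the SIB index, and the ModRM rm/SIB base/
   opcode register respectively. The handlers below therefore OR '1 << 3'
   into the corresponding uRex* decoder state before fetching and
   dispatching the next opcode byte. */
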
8065/** Opcode 0x40. */
8066FNIEMOP_DEF(iemOp_inc_eAX)
8067{
8068 /*
8069 * This is a REX prefix in 64-bit mode.
8070 */
8071 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8072 {
8073 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
8074 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;
8075
8076 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8077 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8078 }
8079
8080 IEMOP_MNEMONIC("inc eAX");
8081 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
8082}
8083
8084
8085/** Opcode 0x41. */
8086FNIEMOP_DEF(iemOp_inc_eCX)
8087{
8088 /*
8089 * This is a REX prefix in 64-bit mode.
8090 */
8091 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8092 {
8093 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
8094 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
8095 pVCpu->iem.s.uRexB = 1 << 3;
8096
8097 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8098 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8099 }
8100
8101 IEMOP_MNEMONIC("inc eCX");
8102 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
8103}
8104
8105
8106/** Opcode 0x42. */
8107FNIEMOP_DEF(iemOp_inc_eDX)
8108{
8109 /*
8110 * This is a REX prefix in 64-bit mode.
8111 */
8112 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8113 {
8114 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
8115 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
8116 pVCpu->iem.s.uRexIndex = 1 << 3;
8117
8118 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8119 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8120 }
8121
8122 IEMOP_MNEMONIC("inc eDX");
8123 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
8124}
8125
8126
8127
8128/** Opcode 0x43. */
8129FNIEMOP_DEF(iemOp_inc_eBX)
8130{
8131 /*
8132 * This is a REX prefix in 64-bit mode.
8133 */
8134 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8135 {
8136 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
8137 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
8138 pVCpu->iem.s.uRexB = 1 << 3;
8139 pVCpu->iem.s.uRexIndex = 1 << 3;
8140
8141 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8142 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8143 }
8144
8145 IEMOP_MNEMONIC("inc eBX");
8146 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
8147}
8148
8149
8150/** Opcode 0x44. */
8151FNIEMOP_DEF(iemOp_inc_eSP)
8152{
8153 /*
8154 * This is a REX prefix in 64-bit mode.
8155 */
8156 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8157 {
8158 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
8159 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
8160 pVCpu->iem.s.uRexReg = 1 << 3;
8161
8162 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8163 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8164 }
8165
8166 IEMOP_MNEMONIC("inc eSP");
8167 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
8168}
8169
8170
8171/** Opcode 0x45. */
8172FNIEMOP_DEF(iemOp_inc_eBP)
8173{
8174 /*
8175 * This is a REX prefix in 64-bit mode.
8176 */
8177 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8178 {
8179 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
8180 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
8181 pVCpu->iem.s.uRexReg = 1 << 3;
8182 pVCpu->iem.s.uRexB = 1 << 3;
8183
8184 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8185 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8186 }
8187
8188 IEMOP_MNEMONIC("inc eBP");
8189 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
8190}
8191
8192
8193/** Opcode 0x46. */
8194FNIEMOP_DEF(iemOp_inc_eSI)
8195{
8196 /*
8197 * This is a REX prefix in 64-bit mode.
8198 */
8199 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8200 {
8201 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
8202 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
8203 pVCpu->iem.s.uRexReg = 1 << 3;
8204 pVCpu->iem.s.uRexIndex = 1 << 3;
8205
8206 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8207 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8208 }
8209
8210 IEMOP_MNEMONIC("inc eSI");
8211 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
8212}
8213
8214
8215/** Opcode 0x47. */
8216FNIEMOP_DEF(iemOp_inc_eDI)
8217{
8218 /*
8219 * This is a REX prefix in 64-bit mode.
8220 */
8221 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8222 {
8223 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
8224 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
8225 pVCpu->iem.s.uRexReg = 1 << 3;
8226 pVCpu->iem.s.uRexB = 1 << 3;
8227 pVCpu->iem.s.uRexIndex = 1 << 3;
8228
8229 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8230 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8231 }
8232
8233 IEMOP_MNEMONIC("inc eDI");
8234 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
8235}
8236
8237
8238/** Opcode 0x48. */
8239FNIEMOP_DEF(iemOp_dec_eAX)
8240{
8241 /*
8242 * This is a REX prefix in 64-bit mode.
8243 */
8244 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8245 {
8246 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
8247 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
8248 iemRecalEffOpSize(pVCpu);
8249
8250 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8251 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8252 }
8253
8254 IEMOP_MNEMONIC("dec eAX");
8255 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
8256}
8257
8258
8259/** Opcode 0x49. */
8260FNIEMOP_DEF(iemOp_dec_eCX)
8261{
8262 /*
8263 * This is a REX prefix in 64-bit mode.
8264 */
8265 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8266 {
8267 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
8268 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
8269 pVCpu->iem.s.uRexB = 1 << 3;
8270 iemRecalEffOpSize(pVCpu);
8271
8272 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8273 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8274 }
8275
8276 IEMOP_MNEMONIC("dec eCX");
8277 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
8278}
8279
8280
8281/** Opcode 0x4a. */
8282FNIEMOP_DEF(iemOp_dec_eDX)
8283{
8284 /*
8285 * This is a REX prefix in 64-bit mode.
8286 */
8287 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8288 {
8289 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
8290 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8291 pVCpu->iem.s.uRexIndex = 1 << 3;
8292 iemRecalEffOpSize(pVCpu);
8293
8294 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8295 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8296 }
8297
8298 IEMOP_MNEMONIC("dec eDX");
8299 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
8300}
8301
8302
8303/** Opcode 0x4b. */
8304FNIEMOP_DEF(iemOp_dec_eBX)
8305{
8306 /*
8307 * This is a REX prefix in 64-bit mode.
8308 */
8309 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8310 {
8311 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
8312 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8313 pVCpu->iem.s.uRexB = 1 << 3;
8314 pVCpu->iem.s.uRexIndex = 1 << 3;
8315 iemRecalEffOpSize(pVCpu);
8316
8317 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8318 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8319 }
8320
8321 IEMOP_MNEMONIC("dec eBX");
8322 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
8323}
8324
8325
8326/** Opcode 0x4c. */
8327FNIEMOP_DEF(iemOp_dec_eSP)
8328{
8329 /*
8330 * This is a REX prefix in 64-bit mode.
8331 */
8332 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8333 {
8334 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
8335 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
8336 pVCpu->iem.s.uRexReg = 1 << 3;
8337 iemRecalEffOpSize(pVCpu);
8338
8339 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8340 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8341 }
8342
8343 IEMOP_MNEMONIC("dec eSP");
8344 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
8345}
8346
8347
8348/** Opcode 0x4d. */
8349FNIEMOP_DEF(iemOp_dec_eBP)
8350{
8351 /*
8352 * This is a REX prefix in 64-bit mode.
8353 */
8354 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8355 {
8356 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
8357 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
8358 pVCpu->iem.s.uRexReg = 1 << 3;
8359 pVCpu->iem.s.uRexB = 1 << 3;
8360 iemRecalEffOpSize(pVCpu);
8361
8362 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8363 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8364 }
8365
8366 IEMOP_MNEMONIC("dec eBP");
8367 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
8368}
8369
8370
8371/** Opcode 0x4e. */
8372FNIEMOP_DEF(iemOp_dec_eSI)
8373{
8374 /*
8375 * This is a REX prefix in 64-bit mode.
8376 */
8377 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8378 {
8379 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
8380 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8381 pVCpu->iem.s.uRexReg = 1 << 3;
8382 pVCpu->iem.s.uRexIndex = 1 << 3;
8383 iemRecalEffOpSize(pVCpu);
8384
8385 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8386 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8387 }
8388
8389 IEMOP_MNEMONIC("dec eSI");
8390 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
8391}
8392
8393
8394/** Opcode 0x4f. */
8395FNIEMOP_DEF(iemOp_dec_eDI)
8396{
8397 /*
8398 * This is a REX prefix in 64-bit mode.
8399 */
8400 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8401 {
8402 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
8403 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8404 pVCpu->iem.s.uRexReg = 1 << 3;
8405 pVCpu->iem.s.uRexB = 1 << 3;
8406 pVCpu->iem.s.uRexIndex = 1 << 3;
8407 iemRecalEffOpSize(pVCpu);
8408
8409 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8410 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8411 }
8412
8413 IEMOP_MNEMONIC("dec eDI");
8414 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
8415}
8416
8417
8418/**
8419 * Common 'push register' helper.
8420 */
8421FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
8422{
8423 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8424 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8425 {
8426 iReg |= pVCpu->iem.s.uRexB;
8427 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
8428 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8429 }
8430
8431 switch (pVCpu->iem.s.enmEffOpSize)
8432 {
8433 case IEMMODE_16BIT:
8434 IEM_MC_BEGIN(0, 1);
8435 IEM_MC_LOCAL(uint16_t, u16Value);
8436 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
8437 IEM_MC_PUSH_U16(u16Value);
8438 IEM_MC_ADVANCE_RIP();
8439 IEM_MC_END();
8440 break;
8441
8442 case IEMMODE_32BIT:
8443 IEM_MC_BEGIN(0, 1);
8444 IEM_MC_LOCAL(uint32_t, u32Value);
8445 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
8446 IEM_MC_PUSH_U32(u32Value);
8447 IEM_MC_ADVANCE_RIP();
8448 IEM_MC_END();
8449 break;
8450
8451 case IEMMODE_64BIT:
8452 IEM_MC_BEGIN(0, 1);
8453 IEM_MC_LOCAL(uint64_t, u64Value);
8454 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
8455 IEM_MC_PUSH_U64(u64Value);
8456 IEM_MC_ADVANCE_RIP();
8457 IEM_MC_END();
8458 break;
8459 }
8460
8461 return VINF_SUCCESS;
8462}
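
/* Note! In 64-bit mode PUSH defaults to a 64-bit operand and has no 32-bit
   encoding; an operand size (66h) prefix selects the 16-bit form instead.
   That is what the enmDefOpSize/enmEffOpSize override at the top of the
   helper above implements; the pop helper further down does the same. */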
8463
8464
8465/** Opcode 0x50. */
8466FNIEMOP_DEF(iemOp_push_eAX)
8467{
8468 IEMOP_MNEMONIC("push rAX");
8469 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
8470}
8471
8472
8473/** Opcode 0x51. */
8474FNIEMOP_DEF(iemOp_push_eCX)
8475{
8476 IEMOP_MNEMONIC("push rCX");
8477 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
8478}
8479
8480
8481/** Opcode 0x52. */
8482FNIEMOP_DEF(iemOp_push_eDX)
8483{
8484 IEMOP_MNEMONIC("push rDX");
8485 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
8486}
8487
8488
8489/** Opcode 0x53. */
8490FNIEMOP_DEF(iemOp_push_eBX)
8491{
8492 IEMOP_MNEMONIC("push rBX");
8493 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
8494}
8495
8496
8497/** Opcode 0x54. */
8498FNIEMOP_DEF(iemOp_push_eSP)
8499{
8500 IEMOP_MNEMONIC("push rSP");
8501 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
8502 {
8503 IEM_MC_BEGIN(0, 1);
8504 IEM_MC_LOCAL(uint16_t, u16Value);
8505 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
8506 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
8507 IEM_MC_PUSH_U16(u16Value);
8508 IEM_MC_ADVANCE_RIP();
8509 IEM_MC_END();
return VINF_SUCCESS; /* Done; don't fall through to the common worker below on 8086. */
8510 }
8511 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
8512}
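
/* Note! The 8086/8088 pushed the value SP has *after* the decrement, which
   is what the IEM_MC_SUB_LOCAL_U16(u16Value, 2) special case above models:
   with SP=0x0100, an 8086 stores 0x00fe at ss:0x00fe, whereas a 286 or
   later CPU stores the original 0x0100 there. */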
8513
8514
8515/** Opcode 0x55. */
8516FNIEMOP_DEF(iemOp_push_eBP)
8517{
8518 IEMOP_MNEMONIC("push rBP");
8519 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
8520}
8521
8522
8523/** Opcode 0x56. */
8524FNIEMOP_DEF(iemOp_push_eSI)
8525{
8526 IEMOP_MNEMONIC("push rSI");
8527 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
8528}
8529
8530
8531/** Opcode 0x57. */
8532FNIEMOP_DEF(iemOp_push_eDI)
8533{
8534 IEMOP_MNEMONIC("push rDI");
8535 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
8536}
8537
8538
8539/**
8540 * Common 'pop register' helper.
8541 */
8542FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
8543{
8544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8545 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8546 {
8547 iReg |= pVCpu->iem.s.uRexB;
8548 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
8549 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8550 }
8551
8552 switch (pVCpu->iem.s.enmEffOpSize)
8553 {
8554 case IEMMODE_16BIT:
8555 IEM_MC_BEGIN(0, 1);
8556 IEM_MC_LOCAL(uint16_t *, pu16Dst);
8557 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
8558 IEM_MC_POP_U16(pu16Dst);
8559 IEM_MC_ADVANCE_RIP();
8560 IEM_MC_END();
8561 break;
8562
8563 case IEMMODE_32BIT:
8564 IEM_MC_BEGIN(0, 1);
8565 IEM_MC_LOCAL(uint32_t *, pu32Dst);
8566 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8567 IEM_MC_POP_U32(pu32Dst);
8568 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
8569 IEM_MC_ADVANCE_RIP();
8570 IEM_MC_END();
8571 break;
8572
8573 case IEMMODE_64BIT:
8574 IEM_MC_BEGIN(0, 1);
8575 IEM_MC_LOCAL(uint64_t *, pu64Dst);
8576 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8577 IEM_MC_POP_U64(pu64Dst);
8578 IEM_MC_ADVANCE_RIP();
8579 IEM_MC_END();
8580 break;
8581 }
8582
8583 return VINF_SUCCESS;
8584}
8585
8586
8587/** Opcode 0x58. */
8588FNIEMOP_DEF(iemOp_pop_eAX)
8589{
8590 IEMOP_MNEMONIC("pop rAX");
8591 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
8592}
8593
8594
8595/** Opcode 0x59. */
8596FNIEMOP_DEF(iemOp_pop_eCX)
8597{
8598 IEMOP_MNEMONIC("pop rCX");
8599 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
8600}
8601
8602
8603/** Opcode 0x5a. */
8604FNIEMOP_DEF(iemOp_pop_eDX)
8605{
8606 IEMOP_MNEMONIC("pop rDX");
8607 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
8608}
8609
8610
8611/** Opcode 0x5b. */
8612FNIEMOP_DEF(iemOp_pop_eBX)
8613{
8614 IEMOP_MNEMONIC("pop rBX");
8615 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
8616}
8617
8618
8619/** Opcode 0x5c. */
8620FNIEMOP_DEF(iemOp_pop_eSP)
8621{
8622 IEMOP_MNEMONIC("pop rSP");
8623 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8624 {
8625 if (pVCpu->iem.s.uRexB)
8626 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
8627 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
8628 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8629 }
8630
8631 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
8632 DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
8633 /** @todo add testcase for this instruction. */
8634 switch (pVCpu->iem.s.enmEffOpSize)
8635 {
8636 case IEMMODE_16BIT:
8637 IEM_MC_BEGIN(0, 1);
8638 IEM_MC_LOCAL(uint16_t, u16Dst);
8639 IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
8640 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
8641 IEM_MC_ADVANCE_RIP();
8642 IEM_MC_END();
8643 break;
8644
8645 case IEMMODE_32BIT:
8646 IEM_MC_BEGIN(0, 1);
8647 IEM_MC_LOCAL(uint32_t, u32Dst);
8648 IEM_MC_POP_U32(&u32Dst);
8649 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
8650 IEM_MC_ADVANCE_RIP();
8651 IEM_MC_END();
8652 break;
8653
8654 case IEMMODE_64BIT:
8655 IEM_MC_BEGIN(0, 1);
8656 IEM_MC_LOCAL(uint64_t, u64Dst);
8657 IEM_MC_POP_U64(&u64Dst);
8658 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
8659 IEM_MC_ADVANCE_RIP();
8660 IEM_MC_END();
8661 break;
8662 }
8663
8664 return VINF_SUCCESS;
8665}
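
/* Note! POP SP/ESP/RSP reads the value at the old top of stack and only
   then replaces the stack pointer, so the interim increment is simply
   discarded and the net effect is rSP = value at the old rSP. The
   dedicated MC blocks above model this by popping into a local before
   the store. */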
8666
8667
8668/** Opcode 0x5d. */
8669FNIEMOP_DEF(iemOp_pop_eBP)
8670{
8671 IEMOP_MNEMONIC("pop rBP");
8672 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
8673}
8674
8675
8676/** Opcode 0x5e. */
8677FNIEMOP_DEF(iemOp_pop_eSI)
8678{
8679 IEMOP_MNEMONIC("pop rSI");
8680 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
8681}
8682
8683
8684/** Opcode 0x5f. */
8685FNIEMOP_DEF(iemOp_pop_eDI)
8686{
8687 IEMOP_MNEMONIC("pop rDI");
8688 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
8689}
8690
8691
8692/** Opcode 0x60. */
8693FNIEMOP_DEF(iemOp_pusha)
8694{
8695 IEMOP_MNEMONIC("pusha");
8696 IEMOP_HLP_MIN_186();
8697 IEMOP_HLP_NO_64BIT();
8698 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8699 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
8700 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
8701 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
8702}
8703
8704
8705/** Opcode 0x61. */
8706FNIEMOP_DEF(iemOp_popa)
8707{
8708 IEMOP_MNEMONIC("popa");
8709 IEMOP_HLP_MIN_186();
8710 IEMOP_HLP_NO_64BIT();
8711 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8712 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
8713 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
8714 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
8715}
8716
8717
8718/** Opcode 0x62. */
8719FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
8720// IEMOP_HLP_MIN_186();
8721
8722
8723/** Opcode 0x63 - non-64-bit modes. */
8724FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
8725{
8726 IEMOP_MNEMONIC("arpl Ew,Gw");
8727 IEMOP_HLP_MIN_286();
8728 IEMOP_HLP_NO_REAL_OR_V86_MODE();
8729 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8730
8731 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8732 {
8733 /* Register */
8734 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
8735 IEM_MC_BEGIN(3, 0);
8736 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8737 IEM_MC_ARG(uint16_t, u16Src, 1);
8738 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8739
8740 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8741 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
8742 IEM_MC_REF_EFLAGS(pEFlags);
8743 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
8744
8745 IEM_MC_ADVANCE_RIP();
8746 IEM_MC_END();
8747 }
8748 else
8749 {
8750 /* Memory */
8751 IEM_MC_BEGIN(3, 2);
8752 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8753 IEM_MC_ARG(uint16_t, u16Src, 1);
8754 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8755 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8756
8757 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8758 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
8759 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8760 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8761 IEM_MC_FETCH_EFLAGS(EFlags);
8762 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
8763
8764 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8765 IEM_MC_COMMIT_EFLAGS(EFlags);
8766 IEM_MC_ADVANCE_RIP();
8767 IEM_MC_END();
8768 }
8769 return VINF_SUCCESS;
8771}
8772
8773
8774/** Opcode 0x63.
8775 * @note This is a weird one. It works like a regular move instruction if
8776 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
8777 * @todo This definitely needs a testcase to verify the odd cases. */
8778FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
8779{
8780 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */
8781
8782 IEMOP_MNEMONIC("movsxd Gv,Ev");
8783 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8784
8785 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8786 {
8787 /*
8788 * Register to register.
8789 */
8790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8791 IEM_MC_BEGIN(0, 1);
8792 IEM_MC_LOCAL(uint64_t, u64Value);
8793 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8794 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8795 IEM_MC_ADVANCE_RIP();
8796 IEM_MC_END();
8797 }
8798 else
8799 {
8800 /*
8801 * We're loading a register from memory.
8802 */
8803 IEM_MC_BEGIN(0, 2);
8804 IEM_MC_LOCAL(uint64_t, u64Value);
8805 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8806 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8808 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8809 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8810 IEM_MC_ADVANCE_RIP();
8811 IEM_MC_END();
8812 }
8813 return VINF_SUCCESS;
8814}
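
/* Note! With REX.W set, movsxd sign-extends the 32-bit source: e.g.
   'movsxd rax, ecx' with ECX=0x80000000 yields RAX=0xffffffff80000000.
   Without REX.W it degenerates into an ordinary 32-bit move, at least
   according to the AMD docs cited in the @note above. */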
8815
8816
8817/** Opcode 0x64. */
8818FNIEMOP_DEF(iemOp_seg_FS)
8819{
8820 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
8821 IEMOP_HLP_MIN_386();
8822
8823 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
8824 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
8825
8826 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8827 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8828}
8829
8830
8831/** Opcode 0x65. */
8832FNIEMOP_DEF(iemOp_seg_GS)
8833{
8834 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
8835 IEMOP_HLP_MIN_386();
8836
8837 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
8838 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
8839
8840 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8841 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8842}
8843
8844
8845/** Opcode 0x66. */
8846FNIEMOP_DEF(iemOp_op_size)
8847{
8848 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
8849 IEMOP_HLP_MIN_386();
8850
8851 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
8852 iemRecalEffOpSize(pVCpu);
8853
8854 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8855 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8856}
8857
8858
8859/** Opcode 0x67. */
8860FNIEMOP_DEF(iemOp_addr_size)
8861{
8862 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
8863 IEMOP_HLP_MIN_386();
8864
8865 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
8866 switch (pVCpu->iem.s.enmDefAddrMode)
8867 {
8868 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
8869 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
8870 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
8871 default: AssertFailed();
8872 }
8873
8874 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8875 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8876}
8877
8878
8879/** Opcode 0x68. */
8880FNIEMOP_DEF(iemOp_push_Iz)
8881{
8882 IEMOP_MNEMONIC("push Iz");
8883 IEMOP_HLP_MIN_186();
8884 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8885 switch (pVCpu->iem.s.enmEffOpSize)
8886 {
8887 case IEMMODE_16BIT:
8888 {
8889 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8891 IEM_MC_BEGIN(0,0);
8892 IEM_MC_PUSH_U16(u16Imm);
8893 IEM_MC_ADVANCE_RIP();
8894 IEM_MC_END();
8895 return VINF_SUCCESS;
8896 }
8897
8898 case IEMMODE_32BIT:
8899 {
8900 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8902 IEM_MC_BEGIN(0,0);
8903 IEM_MC_PUSH_U32(u32Imm);
8904 IEM_MC_ADVANCE_RIP();
8905 IEM_MC_END();
8906 return VINF_SUCCESS;
8907 }
8908
8909 case IEMMODE_64BIT:
8910 {
8911 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8913 IEM_MC_BEGIN(0,0);
8914 IEM_MC_PUSH_U64(u64Imm);
8915 IEM_MC_ADVANCE_RIP();
8916 IEM_MC_END();
8917 return VINF_SUCCESS;
8918 }
8919
8920 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8921 }
8922}
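
/* Documentation-only sketch: there is no 64-bit immediate form of PUSH, so
   the 64-bit case above fetches a 32-bit immediate and sign-extends it
   (IEM_OPCODE_GET_NEXT_S32_SX_U64), i.e. (u32Fetched is hypothetical): */
#if 0
uint64_t const u64Imm = (uint64_t)(int64_t)(int32_t)u32Fetched; /* sign-extend 32 -> 64 */
#endif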
8923
8924
8925/** Opcode 0x69. */
8926FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
8927{
8928 IEMOP_MNEMONIC("imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
8929 IEMOP_HLP_MIN_186();
8930 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8931 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
8932
8933 switch (pVCpu->iem.s.enmEffOpSize)
8934 {
8935 case IEMMODE_16BIT:
8936 {
8937 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8938 {
8939 /* register operand */
8940 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8941 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8942
8943 IEM_MC_BEGIN(3, 1);
8944 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8945 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
8946 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8947 IEM_MC_LOCAL(uint16_t, u16Tmp);
8948
8949 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8950 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
8951 IEM_MC_REF_EFLAGS(pEFlags);
8952 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
8953 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
8954
8955 IEM_MC_ADVANCE_RIP();
8956 IEM_MC_END();
8957 }
8958 else
8959 {
8960 /* memory operand */
8961 IEM_MC_BEGIN(3, 2);
8962 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8963 IEM_MC_ARG(uint16_t, u16Src, 1);
8964 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8965 IEM_MC_LOCAL(uint16_t, u16Tmp);
8966 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8967
8968 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
8969 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8970 IEM_MC_ASSIGN(u16Src, u16Imm);
8971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8972 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8973 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
8974 IEM_MC_REF_EFLAGS(pEFlags);
8975 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
8976 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
8977
8978 IEM_MC_ADVANCE_RIP();
8979 IEM_MC_END();
8980 }
8981 return VINF_SUCCESS;
8982 }
8983
8984 case IEMMODE_32BIT:
8985 {
8986 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8987 {
8988 /* register operand */
8989 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8990 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8991
8992 IEM_MC_BEGIN(3, 1);
8993 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8994 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
8995 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8996 IEM_MC_LOCAL(uint32_t, u32Tmp);
8997
8998 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8999 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9000 IEM_MC_REF_EFLAGS(pEFlags);
9001 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9002 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9003
9004 IEM_MC_ADVANCE_RIP();
9005 IEM_MC_END();
9006 }
9007 else
9008 {
9009 /* memory operand */
9010 IEM_MC_BEGIN(3, 2);
9011 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9012 IEM_MC_ARG(uint32_t, u32Src, 1);
9013 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9014 IEM_MC_LOCAL(uint32_t, u32Tmp);
9015 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9016
9017 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9018 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9019 IEM_MC_ASSIGN(u32Src, u32Imm);
9020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9021 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9022 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9023 IEM_MC_REF_EFLAGS(pEFlags);
9024 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9025 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9026
9027 IEM_MC_ADVANCE_RIP();
9028 IEM_MC_END();
9029 }
9030 return VINF_SUCCESS;
9031 }
9032
9033 case IEMMODE_64BIT:
9034 {
9035 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9036 {
9037 /* register operand */
9038 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9040
9041 IEM_MC_BEGIN(3, 1);
9042 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9043 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
9044 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9045 IEM_MC_LOCAL(uint64_t, u64Tmp);
9046
9047 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9048 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9049 IEM_MC_REF_EFLAGS(pEFlags);
9050 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9051 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9052
9053 IEM_MC_ADVANCE_RIP();
9054 IEM_MC_END();
9055 }
9056 else
9057 {
9058 /* memory operand */
9059 IEM_MC_BEGIN(3, 2);
9060 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9061 IEM_MC_ARG(uint64_t, u64Src, 1);
9062 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9063 IEM_MC_LOCAL(uint64_t, u64Tmp);
9064 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9065
9066 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9067 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9068 IEM_MC_ASSIGN(u64Src, u64Imm);
9069 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9070 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9071 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9072 IEM_MC_REF_EFLAGS(pEFlags);
9073 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9074 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9075
9076 IEM_MC_ADVANCE_RIP();
9077 IEM_MC_END();
9078 }
9079 return VINF_SUCCESS;
9080 }
9081 }
9082 AssertFailedReturn(VERR_IEM_IPE_9);
9083}
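
/* Documentation-only sketch of the helper contract assumed above
   (iemAImpl_imul_two_u16/u32/u64): *puDst is multiplied by uSrc as a signed
   value, and CF and OF are set when the full signed product no longer fits
   the destination width. A minimal C model of the 16-bit case; the name is
   hypothetical and the real helper's treatment of the other flags may differ
   (they are declared undefined above). */
#if 0
static void iemSketchImulTwoU16(uint16_t *pu16Dst, uint16_t u16Src, uint32_t *pfEFlags)
{
    int32_t const iFullResult = (int32_t)(int16_t)*pu16Dst * (int32_t)(int16_t)u16Src;
    *pu16Dst = (uint16_t)iFullResult;
    if ((int32_t)(int16_t)iFullResult != iFullResult) /* truncated -> CF+OF */
        *pfEFlags |= X86_EFL_CF | X86_EFL_OF;
    else
        *pfEFlags &= ~(uint32_t)(X86_EFL_CF | X86_EFL_OF);
}
#endif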
9084
9085
9086/** Opcode 0x6a. */
9087FNIEMOP_DEF(iemOp_push_Ib)
9088{
9089 IEMOP_MNEMONIC("push Ib");
9090 IEMOP_HLP_MIN_186();
9091 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9093 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9094
9095 IEM_MC_BEGIN(0,0);
9096 switch (pVCpu->iem.s.enmEffOpSize)
9097 {
9098 case IEMMODE_16BIT:
9099 IEM_MC_PUSH_U16(i8Imm);
9100 break;
9101 case IEMMODE_32BIT:
9102 IEM_MC_PUSH_U32(i8Imm);
9103 break;
9104 case IEMMODE_64BIT:
9105 IEM_MC_PUSH_U64(i8Imm);
9106 break;
9107 }
9108 IEM_MC_ADVANCE_RIP();
9109 IEM_MC_END();
9110 return VINF_SUCCESS;
9111}
9112
9113
9114/** Opcode 0x6b. */
9115FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
9116{
9117 IEMOP_MNEMONIC("imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
9118 IEMOP_HLP_MIN_186();
9119 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9120 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
9121
9122 switch (pVCpu->iem.s.enmEffOpSize)
9123 {
9124 case IEMMODE_16BIT:
9125 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9126 {
9127 /* register operand */
9128 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9130
9131 IEM_MC_BEGIN(3, 1);
9132 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9133 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
9134 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9135 IEM_MC_LOCAL(uint16_t, u16Tmp);
9136
9137 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9138 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9139 IEM_MC_REF_EFLAGS(pEFlags);
9140 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9141 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9142
9143 IEM_MC_ADVANCE_RIP();
9144 IEM_MC_END();
9145 }
9146 else
9147 {
9148 /* memory operand */
9149 IEM_MC_BEGIN(3, 2);
9150 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9151 IEM_MC_ARG(uint16_t, u16Src, 1);
9152 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9153 IEM_MC_LOCAL(uint16_t, u16Tmp);
9154 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9155
9156 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9157 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
9158 IEM_MC_ASSIGN(u16Src, u16Imm);
9159 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9160 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9161 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9162 IEM_MC_REF_EFLAGS(pEFlags);
9163 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9164 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9165
9166 IEM_MC_ADVANCE_RIP();
9167 IEM_MC_END();
9168 }
9169 return VINF_SUCCESS;
9170
9171 case IEMMODE_32BIT:
9172 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9173 {
9174 /* register operand */
9175 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9177
9178 IEM_MC_BEGIN(3, 1);
9179 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9180 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
9181 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9182 IEM_MC_LOCAL(uint32_t, u32Tmp);
9183
9184 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9185 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9186 IEM_MC_REF_EFLAGS(pEFlags);
9187 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9188 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9189
9190 IEM_MC_ADVANCE_RIP();
9191 IEM_MC_END();
9192 }
9193 else
9194 {
9195 /* memory operand */
9196 IEM_MC_BEGIN(3, 2);
9197 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9198 IEM_MC_ARG(uint32_t, u32Src, 1);
9199 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9200 IEM_MC_LOCAL(uint32_t, u32Tmp);
9201 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9202
9203 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9204 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
9205 IEM_MC_ASSIGN(u32Src, u32Imm);
9206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9207 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9208 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9209 IEM_MC_REF_EFLAGS(pEFlags);
9210 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9211 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9212
9213 IEM_MC_ADVANCE_RIP();
9214 IEM_MC_END();
9215 }
9216 return VINF_SUCCESS;
9217
9218 case IEMMODE_64BIT:
9219 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9220 {
9221 /* register operand */
9222 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9224
9225 IEM_MC_BEGIN(3, 1);
9226 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9227 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
9228 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9229 IEM_MC_LOCAL(uint64_t, u64Tmp);
9230
9231 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9232 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9233 IEM_MC_REF_EFLAGS(pEFlags);
9234 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9235 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9236
9237 IEM_MC_ADVANCE_RIP();
9238 IEM_MC_END();
9239 }
9240 else
9241 {
9242 /* memory operand */
9243 IEM_MC_BEGIN(3, 2);
9244 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9245 IEM_MC_ARG(uint64_t, u64Src, 1);
9246 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9247 IEM_MC_LOCAL(uint64_t, u64Tmp);
9248 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9249
9250 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9251 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
9252 IEM_MC_ASSIGN(u64Src, u64Imm);
9253 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9254 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9255 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9256 IEM_MC_REF_EFLAGS(pEFlags);
9257 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9258 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9259
9260 IEM_MC_ADVANCE_RIP();
9261 IEM_MC_END();
9262 }
9263 return VINF_SUCCESS;
9264 }
9265 AssertFailedReturn(VERR_IEM_IPE_8);
9266}
9267
9268
9269/** Opcode 0x6c. */
9270FNIEMOP_DEF(iemOp_insb_Yb_DX)
9271{
9272 IEMOP_HLP_MIN_186();
9273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9274 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
9275 {
9276 IEMOP_MNEMONIC("rep ins Yb,DX");
9277 switch (pVCpu->iem.s.enmEffAddrMode)
9278 {
9279 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
9280 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
9281 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
9282 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9283 }
9284 }
9285 else
9286 {
9287 IEMOP_MNEMONIC("ins Yb,DX");
9288 switch (pVCpu->iem.s.enmEffAddrMode)
9289 {
9290 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
9291 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
9292 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
9293 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9294 }
9295 }
9296}
9297
9298
9299/** Opcode 0x6d. */
9300FNIEMOP_DEF(iemOp_inswd_Yv_DX)
9301{
9302 IEMOP_HLP_MIN_186();
9303 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9304 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
9305 {
9306 IEMOP_MNEMONIC("rep ins Yv,DX");
9307 switch (pVCpu->iem.s.enmEffOpSize)
9308 {
9309 case IEMMODE_16BIT:
9310 switch (pVCpu->iem.s.enmEffAddrMode)
9311 {
9312 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
9313 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
9314 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
9315 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9316 }
9317 break;
9318 case IEMMODE_64BIT:
9319 case IEMMODE_32BIT:
9320 switch (pVCpu->iem.s.enmEffAddrMode)
9321 {
9322 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
9323 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
9324 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
9325 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9326 }
9327 break;
9328 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9329 }
9330 }
9331 else
9332 {
9333 IEMOP_MNEMONIC("ins Yv,DX");
9334 switch (pVCpu->iem.s.enmEffOpSize)
9335 {
9336 case IEMMODE_16BIT:
9337 switch (pVCpu->iem.s.enmEffAddrMode)
9338 {
9339 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
9340 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
9341 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
9342 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9343 }
9344 break;
9345 case IEMMODE_64BIT:
9346 case IEMMODE_32BIT:
9347 switch (pVCpu->iem.s.enmEffAddrMode)
9348 {
9349 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
9350 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
9351 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
9352 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9353 }
9354 break;
9355 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9356 }
9357 }
9358}
9359
9360
9361/** Opcode 0x6e. */
9362FNIEMOP_DEF(iemOp_outsb_Yb_DX)
9363{
9364 IEMOP_HLP_MIN_186();
9365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9366 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
9367 {
9368 IEMOP_MNEMONIC("rep outs DX,Yb");
9369 switch (pVCpu->iem.s.enmEffAddrMode)
9370 {
9371 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
9372 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
9373 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
9374 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9375 }
9376 }
9377 else
9378 {
9379 IEMOP_MNEMONIC("outs DX,Yb");
9380 switch (pVCpu->iem.s.enmEffAddrMode)
9381 {
9382 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
9383 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
9384 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
9385 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9386 }
9387 }
9388}
9389
9390
9391/** Opcode 0x6f. */
9392FNIEMOP_DEF(iemOp_outswd_Yv_DX)
9393{
9394 IEMOP_HLP_MIN_186();
9395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9396 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
9397 {
9398 IEMOP_MNEMONIC("rep outs DX,Yv");
9399 switch (pVCpu->iem.s.enmEffOpSize)
9400 {
9401 case IEMMODE_16BIT:
9402 switch (pVCpu->iem.s.enmEffAddrMode)
9403 {
9404 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
9405 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
9406 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
9407 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9408 }
9409 break;
9410 case IEMMODE_64BIT:
9411 case IEMMODE_32BIT:
9412 switch (pVCpu->iem.s.enmEffAddrMode)
9413 {
9414 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
9415 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
9416 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
9417 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9418 }
9419 break;
9420 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9421 }
9422 }
9423 else
9424 {
9425 IEMOP_MNEMONIC("outs DX,Yv");
9426 switch (pVCpu->iem.s.enmEffOpSize)
9427 {
9428 case IEMMODE_16BIT:
9429 switch (pVCpu->iem.s.enmEffAddrMode)
9430 {
9431 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
9432 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
9433 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
9434 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9435 }
9436 break;
9437 case IEMMODE_64BIT:
9438 case IEMMODE_32BIT:
9439 switch (pVCpu->iem.s.enmEffAddrMode)
9440 {
9441 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
9442 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
9443 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
9444 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9445 }
9446 break;
9447 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9448 }
9449 }
9450}
9451
9452
9453/** Opcode 0x70. */
9454FNIEMOP_DEF(iemOp_jo_Jb)
9455{
9456 IEMOP_MNEMONIC("jo Jb");
9457 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9459 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9460
9461 IEM_MC_BEGIN(0, 0);
9462 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
9463 IEM_MC_REL_JMP_S8(i8Imm);
9464 } IEM_MC_ELSE() {
9465 IEM_MC_ADVANCE_RIP();
9466 } IEM_MC_ENDIF();
9467 IEM_MC_END();
9468 return VINF_SUCCESS;
9469}
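
/* Documentation-only sketch of the short-Jcc pattern shared by opcodes
   0x70..0x7f: an 8-bit displacement, sign-extended and applied relative to
   the end of the instruction when the condition holds (variable names are
   hypothetical): */
#if 0
if (fConditionHolds)
    uNewRip = uRipOfNextInstr + (int64_t)i8Imm; /* branch taken */
else
    uNewRip = uRipOfNextInstr;                  /* fall through */
#endif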
9470
9471
9472/** Opcode 0x71. */
9473FNIEMOP_DEF(iemOp_jno_Jb)
9474{
9475 IEMOP_MNEMONIC("jno Jb");
9476 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9478 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9479
9480 IEM_MC_BEGIN(0, 0);
9481 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
9482 IEM_MC_ADVANCE_RIP();
9483 } IEM_MC_ELSE() {
9484 IEM_MC_REL_JMP_S8(i8Imm);
9485 } IEM_MC_ENDIF();
9486 IEM_MC_END();
9487 return VINF_SUCCESS;
9488}
9489
9490/** Opcode 0x72. */
9491FNIEMOP_DEF(iemOp_jc_Jb)
9492{
9493 IEMOP_MNEMONIC("jc/jnae Jb");
9494 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9496 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9497
9498 IEM_MC_BEGIN(0, 0);
9499 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
9500 IEM_MC_REL_JMP_S8(i8Imm);
9501 } IEM_MC_ELSE() {
9502 IEM_MC_ADVANCE_RIP();
9503 } IEM_MC_ENDIF();
9504 IEM_MC_END();
9505 return VINF_SUCCESS;
9506}
9507
9508
9509/** Opcode 0x73. */
9510FNIEMOP_DEF(iemOp_jnc_Jb)
9511{
9512 IEMOP_MNEMONIC("jnc/jnb Jb");
9513 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9515 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9516
9517 IEM_MC_BEGIN(0, 0);
9518 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
9519 IEM_MC_ADVANCE_RIP();
9520 } IEM_MC_ELSE() {
9521 IEM_MC_REL_JMP_S8(i8Imm);
9522 } IEM_MC_ENDIF();
9523 IEM_MC_END();
9524 return VINF_SUCCESS;
9525}
9526
9527
9528/** Opcode 0x74. */
9529FNIEMOP_DEF(iemOp_je_Jb)
9530{
9531 IEMOP_MNEMONIC("je/jz Jb");
9532 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9534 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9535
9536 IEM_MC_BEGIN(0, 0);
9537 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
9538 IEM_MC_REL_JMP_S8(i8Imm);
9539 } IEM_MC_ELSE() {
9540 IEM_MC_ADVANCE_RIP();
9541 } IEM_MC_ENDIF();
9542 IEM_MC_END();
9543 return VINF_SUCCESS;
9544}
9545
9546
9547/** Opcode 0x75. */
9548FNIEMOP_DEF(iemOp_jne_Jb)
9549{
9550 IEMOP_MNEMONIC("jne/jnz Jb");
9551 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9552 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9553 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9554
9555 IEM_MC_BEGIN(0, 0);
9556 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
9557 IEM_MC_ADVANCE_RIP();
9558 } IEM_MC_ELSE() {
9559 IEM_MC_REL_JMP_S8(i8Imm);
9560 } IEM_MC_ENDIF();
9561 IEM_MC_END();
9562 return VINF_SUCCESS;
9563}
9564
9565
9566/** Opcode 0x76. */
9567FNIEMOP_DEF(iemOp_jbe_Jb)
9568{
9569 IEMOP_MNEMONIC("jbe/jna Jb");
9570 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9572 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9573
9574 IEM_MC_BEGIN(0, 0);
9575 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
9576 IEM_MC_REL_JMP_S8(i8Imm);
9577 } IEM_MC_ELSE() {
9578 IEM_MC_ADVANCE_RIP();
9579 } IEM_MC_ENDIF();
9580 IEM_MC_END();
9581 return VINF_SUCCESS;
9582}
9583
9584
9585/** Opcode 0x77. */
9586FNIEMOP_DEF(iemOp_jnbe_Jb)
9587{
9588 IEMOP_MNEMONIC("jnbe/ja Jb");
9589 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9591 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9592
9593 IEM_MC_BEGIN(0, 0);
9594 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
9595 IEM_MC_ADVANCE_RIP();
9596 } IEM_MC_ELSE() {
9597 IEM_MC_REL_JMP_S8(i8Imm);
9598 } IEM_MC_ENDIF();
9599 IEM_MC_END();
9600 return VINF_SUCCESS;
9601}
9602
9603
9604/** Opcode 0x78. */
9605FNIEMOP_DEF(iemOp_js_Jb)
9606{
9607 IEMOP_MNEMONIC("js Jb");
9608 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9610 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9611
9612 IEM_MC_BEGIN(0, 0);
9613 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
9614 IEM_MC_REL_JMP_S8(i8Imm);
9615 } IEM_MC_ELSE() {
9616 IEM_MC_ADVANCE_RIP();
9617 } IEM_MC_ENDIF();
9618 IEM_MC_END();
9619 return VINF_SUCCESS;
9620}
9621
9622
9623/** Opcode 0x79. */
9624FNIEMOP_DEF(iemOp_jns_Jb)
9625{
9626 IEMOP_MNEMONIC("jns Jb");
9627 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9629 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9630
9631 IEM_MC_BEGIN(0, 0);
9632 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
9633 IEM_MC_ADVANCE_RIP();
9634 } IEM_MC_ELSE() {
9635 IEM_MC_REL_JMP_S8(i8Imm);
9636 } IEM_MC_ENDIF();
9637 IEM_MC_END();
9638 return VINF_SUCCESS;
9639}
9640
9641
9642/** Opcode 0x7a. */
9643FNIEMOP_DEF(iemOp_jp_Jb)
9644{
9645 IEMOP_MNEMONIC("jp Jb");
9646 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9648 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9649
9650 IEM_MC_BEGIN(0, 0);
9651 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
9652 IEM_MC_REL_JMP_S8(i8Imm);
9653 } IEM_MC_ELSE() {
9654 IEM_MC_ADVANCE_RIP();
9655 } IEM_MC_ENDIF();
9656 IEM_MC_END();
9657 return VINF_SUCCESS;
9658}
9659
9660
9661/** Opcode 0x7b. */
9662FNIEMOP_DEF(iemOp_jnp_Jb)
9663{
9664 IEMOP_MNEMONIC("jnp Jb");
9665 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9667 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9668
9669 IEM_MC_BEGIN(0, 0);
9670 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
9671 IEM_MC_ADVANCE_RIP();
9672 } IEM_MC_ELSE() {
9673 IEM_MC_REL_JMP_S8(i8Imm);
9674 } IEM_MC_ENDIF();
9675 IEM_MC_END();
9676 return VINF_SUCCESS;
9677}
9678
9679
9680/** Opcode 0x7c. */
9681FNIEMOP_DEF(iemOp_jl_Jb)
9682{
9683 IEMOP_MNEMONIC("jl/jnge Jb");
9684 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9686 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9687
9688 IEM_MC_BEGIN(0, 0);
9689 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
9690 IEM_MC_REL_JMP_S8(i8Imm);
9691 } IEM_MC_ELSE() {
9692 IEM_MC_ADVANCE_RIP();
9693 } IEM_MC_ENDIF();
9694 IEM_MC_END();
9695 return VINF_SUCCESS;
9696}
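
/* Documentation-only sketch: the signed conditions are derived from SF and
   OF; "less" holds exactly when the two bits differ, which is what
   IEM_MC_IF_EFL_BITS_NE above tests. Minimal C model, name hypothetical: */
#if 0
static bool iemSketchIsSignedLess(uint32_t fEFlags)
{
    return RT_BOOL(fEFlags & X86_EFL_SF) != RT_BOOL(fEFlags & X86_EFL_OF);
}
#endif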
9697
9698
9699/** Opcode 0x7d. */
9700FNIEMOP_DEF(iemOp_jnl_Jb)
9701{
9702 IEMOP_MNEMONIC("jnl/jge Jb");
9703 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9705 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9706
9707 IEM_MC_BEGIN(0, 0);
9708 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
9709 IEM_MC_ADVANCE_RIP();
9710 } IEM_MC_ELSE() {
9711 IEM_MC_REL_JMP_S8(i8Imm);
9712 } IEM_MC_ENDIF();
9713 IEM_MC_END();
9714 return VINF_SUCCESS;
9715}
9716
9717
9718/** Opcode 0x7e. */
9719FNIEMOP_DEF(iemOp_jle_Jb)
9720{
9721 IEMOP_MNEMONIC("jle/jng Jb");
9722 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9724 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9725
9726 IEM_MC_BEGIN(0, 0);
9727 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
9728 IEM_MC_REL_JMP_S8(i8Imm);
9729 } IEM_MC_ELSE() {
9730 IEM_MC_ADVANCE_RIP();
9731 } IEM_MC_ENDIF();
9732 IEM_MC_END();
9733 return VINF_SUCCESS;
9734}
9735
9736
9737/** Opcode 0x7f. */
9738FNIEMOP_DEF(iemOp_jnle_Jb)
9739{
9740 IEMOP_MNEMONIC("jnle/jg Jb");
9741 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9742 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9743 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9744
9745 IEM_MC_BEGIN(0, 0);
9746 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
9747 IEM_MC_ADVANCE_RIP();
9748 } IEM_MC_ELSE() {
9749 IEM_MC_REL_JMP_S8(i8Imm);
9750 } IEM_MC_ENDIF();
9751 IEM_MC_END();
9752 return VINF_SUCCESS;
9753}
9754
9755
9756/** Opcode 0x80. */
9757FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
9758{
9759 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9760 IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Eb,Ib");
9761 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
9762
9763 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9764 {
9765 /* register target */
9766 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9768 IEM_MC_BEGIN(3, 0);
9769 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9770 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
9771 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9772
9773 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9774 IEM_MC_REF_EFLAGS(pEFlags);
9775 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
9776
9777 IEM_MC_ADVANCE_RIP();
9778 IEM_MC_END();
9779 }
9780 else
9781 {
9782 /* memory target */
9783 uint32_t fAccess;
9784 if (pImpl->pfnLockedU8)
9785 fAccess = IEM_ACCESS_DATA_RW;
9786 else /* CMP */
9787 fAccess = IEM_ACCESS_DATA_R;
9788 IEM_MC_BEGIN(3, 2);
9789 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9790 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9791 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9792
9793 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9794 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9795 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
9796 if (pImpl->pfnLockedU8)
9797 IEMOP_HLP_DONE_DECODING();
9798 else
9799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9800
9801 IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9802 IEM_MC_FETCH_EFLAGS(EFlags);
9803 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9804 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
9805 else
9806 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
9807
9808 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
9809 IEM_MC_COMMIT_EFLAGS(EFlags);
9810 IEM_MC_ADVANCE_RIP();
9811 IEM_MC_END();
9812 }
9813 return VINF_SUCCESS;
9814}
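
/* Documentation-only note on the IEMOP_MNEMONIC2 string used above: it is a
   packed table of NUL-padded mnemonics spaced 4 bytes apart, so adding reg*4
   to the string start selects the name for /0../7 (add, or, adc, sbb, and,
   sub, xor, cmp). Sketch: */
#if 0
static const char s_szGrp1Mnemonics[] = "add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp";
const char *pszMnemonic = &s_szGrp1Mnemonics[4 * ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)];
#endif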
9815
9816
9817/** Opcode 0x81. */
9818FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
9819{
9820 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9821 IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Iz");
9822 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
9823
9824 switch (pVCpu->iem.s.enmEffOpSize)
9825 {
9826 case IEMMODE_16BIT:
9827 {
9828 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9829 {
9830 /* register target */
9831 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9833 IEM_MC_BEGIN(3, 0);
9834 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9835 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
9836 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9837
9838 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9839 IEM_MC_REF_EFLAGS(pEFlags);
9840 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9841
9842 IEM_MC_ADVANCE_RIP();
9843 IEM_MC_END();
9844 }
9845 else
9846 {
9847 /* memory target */
9848 uint32_t fAccess;
9849 if (pImpl->pfnLockedU16)
9850 fAccess = IEM_ACCESS_DATA_RW;
9851                 else /* CMP */
9852 fAccess = IEM_ACCESS_DATA_R;
9853 IEM_MC_BEGIN(3, 2);
9854 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9855 IEM_MC_ARG(uint16_t, u16Src, 1);
9856 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9857 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9858
9859 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
9860 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9861 IEM_MC_ASSIGN(u16Src, u16Imm);
9862 if (pImpl->pfnLockedU16)
9863 IEMOP_HLP_DONE_DECODING();
9864 else
9865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9866 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9867 IEM_MC_FETCH_EFLAGS(EFlags);
9868 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9869 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9870 else
9871 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
9872
9873 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
9874 IEM_MC_COMMIT_EFLAGS(EFlags);
9875 IEM_MC_ADVANCE_RIP();
9876 IEM_MC_END();
9877 }
9878 break;
9879 }
9880
9881 case IEMMODE_32BIT:
9882 {
9883 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9884 {
9885 /* register target */
9886 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9888 IEM_MC_BEGIN(3, 0);
9889 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9890 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
9891 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9892
9893 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9894 IEM_MC_REF_EFLAGS(pEFlags);
9895 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9896 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9897
9898 IEM_MC_ADVANCE_RIP();
9899 IEM_MC_END();
9900 }
9901 else
9902 {
9903 /* memory target */
9904 uint32_t fAccess;
9905 if (pImpl->pfnLockedU32)
9906 fAccess = IEM_ACCESS_DATA_RW;
9907                 else /* CMP */
9908 fAccess = IEM_ACCESS_DATA_R;
9909 IEM_MC_BEGIN(3, 2);
9910 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9911 IEM_MC_ARG(uint32_t, u32Src, 1);
9912 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9913 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9914
9915 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9916 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9917 IEM_MC_ASSIGN(u32Src, u32Imm);
9918 if (pImpl->pfnLockedU32)
9919 IEMOP_HLP_DONE_DECODING();
9920 else
9921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9922 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9923 IEM_MC_FETCH_EFLAGS(EFlags);
9924 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9925 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9926 else
9927 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
9928
9929 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
9930 IEM_MC_COMMIT_EFLAGS(EFlags);
9931 IEM_MC_ADVANCE_RIP();
9932 IEM_MC_END();
9933 }
9934 break;
9935 }
9936
9937 case IEMMODE_64BIT:
9938 {
9939 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9940 {
9941 /* register target */
9942 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9944 IEM_MC_BEGIN(3, 0);
9945 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9946 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
9947 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9948
9949 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9950 IEM_MC_REF_EFLAGS(pEFlags);
9951 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9952
9953 IEM_MC_ADVANCE_RIP();
9954 IEM_MC_END();
9955 }
9956 else
9957 {
9958 /* memory target */
9959 uint32_t fAccess;
9960 if (pImpl->pfnLockedU64)
9961 fAccess = IEM_ACCESS_DATA_RW;
9962 else /* CMP */
9963 fAccess = IEM_ACCESS_DATA_R;
9964 IEM_MC_BEGIN(3, 2);
9965 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9966 IEM_MC_ARG(uint64_t, u64Src, 1);
9967 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9968 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9969
9970 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9971 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9972 if (pImpl->pfnLockedU64)
9973 IEMOP_HLP_DONE_DECODING();
9974 else
9975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9976 IEM_MC_ASSIGN(u64Src, u64Imm);
9977 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9978 IEM_MC_FETCH_EFLAGS(EFlags);
9979 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9980 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9981 else
9982 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
9983
9984 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
9985 IEM_MC_COMMIT_EFLAGS(EFlags);
9986 IEM_MC_ADVANCE_RIP();
9987 IEM_MC_END();
9988 }
9989 break;
9990 }
9991 }
9992 return VINF_SUCCESS;
9993}
9994
9995
9996/** Opcode 0x82. */
9997FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
9998{
9999 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
10000 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
10001}
10002
10003
10004/** Opcode 0x83. */
10005FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
10006{
10007 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10008 IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Ib");
10009    /* Note! Seems the OR, AND, and XOR forms are present on CPUs prior
10010             to the 386, even though they are absent from the Intel reference
10011             manuals and some 3rd party opcode listings. */
10012 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
10013
10014 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10015 {
10016 /*
10017 * Register target
10018 */
10019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10020 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10021 switch (pVCpu->iem.s.enmEffOpSize)
10022 {
10023 case IEMMODE_16BIT:
10024 {
10025 IEM_MC_BEGIN(3, 0);
10026 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10027 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
10028 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10029
10030 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10031 IEM_MC_REF_EFLAGS(pEFlags);
10032 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10033
10034 IEM_MC_ADVANCE_RIP();
10035 IEM_MC_END();
10036 break;
10037 }
10038
10039 case IEMMODE_32BIT:
10040 {
10041 IEM_MC_BEGIN(3, 0);
10042 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10043 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
10044 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10045
10046 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10047 IEM_MC_REF_EFLAGS(pEFlags);
10048 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10049 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10050
10051 IEM_MC_ADVANCE_RIP();
10052 IEM_MC_END();
10053 break;
10054 }
10055
10056 case IEMMODE_64BIT:
10057 {
10058 IEM_MC_BEGIN(3, 0);
10059 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10060 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
10061 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10062
10063 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10064 IEM_MC_REF_EFLAGS(pEFlags);
10065 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10066
10067 IEM_MC_ADVANCE_RIP();
10068 IEM_MC_END();
10069 break;
10070 }
10071 }
10072 }
10073 else
10074 {
10075 /*
10076 * Memory target.
10077 */
10078 uint32_t fAccess;
10079 if (pImpl->pfnLockedU16)
10080 fAccess = IEM_ACCESS_DATA_RW;
10081 else /* CMP */
10082 fAccess = IEM_ACCESS_DATA_R;
10083
10084 switch (pVCpu->iem.s.enmEffOpSize)
10085 {
10086 case IEMMODE_16BIT:
10087 {
10088 IEM_MC_BEGIN(3, 2);
10089 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10090 IEM_MC_ARG(uint16_t, u16Src, 1);
10091 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10092 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10093
10094 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10095 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10096 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
10097 if (pImpl->pfnLockedU16)
10098 IEMOP_HLP_DONE_DECODING();
10099 else
10100 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10101 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10102 IEM_MC_FETCH_EFLAGS(EFlags);
10103 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10104 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10105 else
10106 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
10107
10108 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
10109 IEM_MC_COMMIT_EFLAGS(EFlags);
10110 IEM_MC_ADVANCE_RIP();
10111 IEM_MC_END();
10112 break;
10113 }
10114
10115 case IEMMODE_32BIT:
10116 {
10117 IEM_MC_BEGIN(3, 2);
10118 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10119 IEM_MC_ARG(uint32_t, u32Src, 1);
10120 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10121 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10122
10123 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10124 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10125 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
10126 if (pImpl->pfnLockedU32)
10127 IEMOP_HLP_DONE_DECODING();
10128 else
10129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10130 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10131 IEM_MC_FETCH_EFLAGS(EFlags);
10132 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10133 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10134 else
10135 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
10136
10137 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
10138 IEM_MC_COMMIT_EFLAGS(EFlags);
10139 IEM_MC_ADVANCE_RIP();
10140 IEM_MC_END();
10141 break;
10142 }
10143
10144 case IEMMODE_64BIT:
10145 {
10146 IEM_MC_BEGIN(3, 2);
10147 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10148 IEM_MC_ARG(uint64_t, u64Src, 1);
10149 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10150 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10151
10152 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10153 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10154 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
10155 if (pImpl->pfnLockedU64)
10156 IEMOP_HLP_DONE_DECODING();
10157 else
10158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10159 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10160 IEM_MC_FETCH_EFLAGS(EFlags);
10161 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10162 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10163 else
10164 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
10165
10166 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
10167 IEM_MC_COMMIT_EFLAGS(EFlags);
10168 IEM_MC_ADVANCE_RIP();
10169 IEM_MC_END();
10170 break;
10171 }
10172 }
10173 }
10174 return VINF_SUCCESS;
10175}
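
/* Documentation-only sketch: opcode 0x83 is the sign-extended byte immediate
   form of 0x81; the (int8_t) casts above widen the fetched byte to the
   effective operand size before the operation, e.g. for the 32-bit case
   (u8Imm as fetched in the function above): */
#if 0
uint32_t const u32Src = (uint32_t)(int32_t)(int8_t)u8Imm; /* u8Imm = 0xff -> u32Src = 0xffffffff */
#endif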
10176
10177
10178/** Opcode 0x84. */
10179FNIEMOP_DEF(iemOp_test_Eb_Gb)
10180{
10181 IEMOP_MNEMONIC("test Eb,Gb");
10182 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
10183 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
10184}
10185
10186
10187/** Opcode 0x85. */
10188FNIEMOP_DEF(iemOp_test_Ev_Gv)
10189{
10190 IEMOP_MNEMONIC("test Ev,Gv");
10191 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
10192 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
10193}
10194
10195
10196/** Opcode 0x86. */
10197FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
10198{
10199 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10200 IEMOP_MNEMONIC("xchg Eb,Gb");
10201
10202 /*
10203 * If rm is denoting a register, no more instruction bytes.
10204 */
10205 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10206 {
10207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10208
10209 IEM_MC_BEGIN(0, 2);
10210 IEM_MC_LOCAL(uint8_t, uTmp1);
10211 IEM_MC_LOCAL(uint8_t, uTmp2);
10212
10213 IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10214 IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10215 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
10216 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
10217
10218 IEM_MC_ADVANCE_RIP();
10219 IEM_MC_END();
10220 }
10221 else
10222 {
10223 /*
10224 * We're accessing memory.
10225 */
10226/** @todo the register must be committed separately! */
10227 IEM_MC_BEGIN(2, 2);
10228 IEM_MC_ARG(uint8_t *, pu8Mem, 0);
10229 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
10230 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10231
10232 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10233 IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10234 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10235 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
10236 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);
10237
10238 IEM_MC_ADVANCE_RIP();
10239 IEM_MC_END();
10240 }
10241 return VINF_SUCCESS;
10242}
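
/* Documentation-only sketch: XCHG with a memory operand is implicitly locked
   on real hardware, so the assembly helper invoked above is expected to
   perform an atomic exchange. A minimal model using the IPRT atomic API;
   the helper name is hypothetical: */
#if 0
static void iemSketchXchgU8(uint8_t volatile *pu8Mem, uint8_t *pu8Reg)
{
    *pu8Reg = ASMAtomicXchgU8(pu8Mem, *pu8Reg); /* returns the old memory value */
}
#endif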
10243
10244
10245/** Opcode 0x87. */
10246FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
10247{
10248 IEMOP_MNEMONIC("xchg Ev,Gv");
10249 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10250
10251 /*
10252 * If rm is denoting a register, no more instruction bytes.
10253 */
10254 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10255 {
10256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10257
10258 switch (pVCpu->iem.s.enmEffOpSize)
10259 {
10260 case IEMMODE_16BIT:
10261 IEM_MC_BEGIN(0, 2);
10262 IEM_MC_LOCAL(uint16_t, uTmp1);
10263 IEM_MC_LOCAL(uint16_t, uTmp2);
10264
10265 IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10266 IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10267 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
10268 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
10269
10270 IEM_MC_ADVANCE_RIP();
10271 IEM_MC_END();
10272 return VINF_SUCCESS;
10273
10274 case IEMMODE_32BIT:
10275 IEM_MC_BEGIN(0, 2);
10276 IEM_MC_LOCAL(uint32_t, uTmp1);
10277 IEM_MC_LOCAL(uint32_t, uTmp2);
10278
10279 IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10280 IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10281 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
10282 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
10283
10284 IEM_MC_ADVANCE_RIP();
10285 IEM_MC_END();
10286 return VINF_SUCCESS;
10287
10288 case IEMMODE_64BIT:
10289 IEM_MC_BEGIN(0, 2);
10290 IEM_MC_LOCAL(uint64_t, uTmp1);
10291 IEM_MC_LOCAL(uint64_t, uTmp2);
10292
10293 IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10294 IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10295 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
10296 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
10297
10298 IEM_MC_ADVANCE_RIP();
10299 IEM_MC_END();
10300 return VINF_SUCCESS;
10301
10302 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10303 }
10304 }
10305 else
10306 {
10307 /*
10308 * We're accessing memory.
10309 */
10310 switch (pVCpu->iem.s.enmEffOpSize)
10311 {
10312/** @todo the register must be committed separately! */
10313 case IEMMODE_16BIT:
10314 IEM_MC_BEGIN(2, 2);
10315 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
10316 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
10317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10318
10319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10320 IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10321 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10322 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
10323 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
10324
10325 IEM_MC_ADVANCE_RIP();
10326 IEM_MC_END();
10327 return VINF_SUCCESS;
10328
10329 case IEMMODE_32BIT:
10330 IEM_MC_BEGIN(2, 2);
10331 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
10332 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
10333 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10334
10335 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10336 IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10337 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10338 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
10339 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
10340
10341 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
10342 IEM_MC_ADVANCE_RIP();
10343 IEM_MC_END();
10344 return VINF_SUCCESS;
10345
10346 case IEMMODE_64BIT:
10347 IEM_MC_BEGIN(2, 2);
10348 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
10349 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
10350 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10351
10352 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10353 IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10354 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10355 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
10356 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
10357
10358 IEM_MC_ADVANCE_RIP();
10359 IEM_MC_END();
10360 return VINF_SUCCESS;
10361
10362 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10363 }
10364 }
10365}
10366
10367
10368/** Opcode 0x88. */
10369FNIEMOP_DEF(iemOp_mov_Eb_Gb)
10370{
10371 IEMOP_MNEMONIC("mov Eb,Gb");
10372
10373 uint8_t bRm;
10374 IEM_OPCODE_GET_NEXT_U8(&bRm);
10375
10376 /*
10377 * If rm is denoting a register, no more instruction bytes.
10378 */
10379 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10380 {
10381 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10382 IEM_MC_BEGIN(0, 1);
10383 IEM_MC_LOCAL(uint8_t, u8Value);
10384 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10385 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
10386 IEM_MC_ADVANCE_RIP();
10387 IEM_MC_END();
10388 }
10389 else
10390 {
10391 /*
10392 * We're writing a register to memory.
10393 */
10394 IEM_MC_BEGIN(0, 2);
10395 IEM_MC_LOCAL(uint8_t, u8Value);
10396 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10397 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10399 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10400 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
10401 IEM_MC_ADVANCE_RIP();
10402 IEM_MC_END();
10403 }
10404 return VINF_SUCCESS;
10406}
10407
10408
10409/** Opcode 0x89. */
10410FNIEMOP_DEF(iemOp_mov_Ev_Gv)
10411{
10412 IEMOP_MNEMONIC("mov Ev,Gv");
10413
10414 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10415
10416 /*
10417 * If rm is denoting a register, no more instruction bytes.
10418 */
10419 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10420 {
10421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10422 switch (pVCpu->iem.s.enmEffOpSize)
10423 {
10424 case IEMMODE_16BIT:
10425 IEM_MC_BEGIN(0, 1);
10426 IEM_MC_LOCAL(uint16_t, u16Value);
10427 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10428 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
10429 IEM_MC_ADVANCE_RIP();
10430 IEM_MC_END();
10431 break;
10432
10433 case IEMMODE_32BIT:
10434 IEM_MC_BEGIN(0, 1);
10435 IEM_MC_LOCAL(uint32_t, u32Value);
10436 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10437 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
10438 IEM_MC_ADVANCE_RIP();
10439 IEM_MC_END();
10440 break;
10441
10442 case IEMMODE_64BIT:
10443 IEM_MC_BEGIN(0, 1);
10444 IEM_MC_LOCAL(uint64_t, u64Value);
10445 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10446 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
10447 IEM_MC_ADVANCE_RIP();
10448 IEM_MC_END();
10449 break;
10450 }
10451 }
10452 else
10453 {
10454 /*
10455 * We're writing a register to memory.
10456 */
10457 switch (pVCpu->iem.s.enmEffOpSize)
10458 {
10459 case IEMMODE_16BIT:
10460 IEM_MC_BEGIN(0, 2);
10461 IEM_MC_LOCAL(uint16_t, u16Value);
10462 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10463 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10465 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10466 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
10467 IEM_MC_ADVANCE_RIP();
10468 IEM_MC_END();
10469 break;
10470
10471 case IEMMODE_32BIT:
10472 IEM_MC_BEGIN(0, 2);
10473 IEM_MC_LOCAL(uint32_t, u32Value);
10474 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10475 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10477 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10478 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
10479 IEM_MC_ADVANCE_RIP();
10480 IEM_MC_END();
10481 break;
10482
10483 case IEMMODE_64BIT:
10484 IEM_MC_BEGIN(0, 2);
10485 IEM_MC_LOCAL(uint64_t, u64Value);
10486 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10487 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10488 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10489 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10490 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
10491 IEM_MC_ADVANCE_RIP();
10492 IEM_MC_END();
10493 break;
10494 }
10495 }
10496 return VINF_SUCCESS;
10497}
10498
10499
10500/** Opcode 0x8a. */
10501FNIEMOP_DEF(iemOp_mov_Gb_Eb)
10502{
10503 IEMOP_MNEMONIC("mov Gb,Eb");
10504
10505 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10506
10507 /*
10508 * If rm is denoting a register, no more instruction bytes.
10509 */
10510 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10511 {
10512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10513 IEM_MC_BEGIN(0, 1);
10514 IEM_MC_LOCAL(uint8_t, u8Value);
10515 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10516 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
10517 IEM_MC_ADVANCE_RIP();
10518 IEM_MC_END();
10519 }
10520 else
10521 {
10522 /*
10523 * We're loading a register from memory.
10524 */
10525 IEM_MC_BEGIN(0, 2);
10526 IEM_MC_LOCAL(uint8_t, u8Value);
10527 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10528 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10530 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10531 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
10532 IEM_MC_ADVANCE_RIP();
10533 IEM_MC_END();
10534 }
10535 return VINF_SUCCESS;
10536}
10537
10538
10539/** Opcode 0x8b. */
10540FNIEMOP_DEF(iemOp_mov_Gv_Ev)
10541{
10542 IEMOP_MNEMONIC("mov Gv,Ev");
10543
10544 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10545
10546 /*
10547 * If rm is denoting a register, no more instruction bytes.
10548 */
10549 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10550 {
10551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10552 switch (pVCpu->iem.s.enmEffOpSize)
10553 {
10554 case IEMMODE_16BIT:
10555 IEM_MC_BEGIN(0, 1);
10556 IEM_MC_LOCAL(uint16_t, u16Value);
10557 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10558 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
10559 IEM_MC_ADVANCE_RIP();
10560 IEM_MC_END();
10561 break;
10562
10563 case IEMMODE_32BIT:
10564 IEM_MC_BEGIN(0, 1);
10565 IEM_MC_LOCAL(uint32_t, u32Value);
10566 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10567 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
10568 IEM_MC_ADVANCE_RIP();
10569 IEM_MC_END();
10570 break;
10571
10572 case IEMMODE_64BIT:
10573 IEM_MC_BEGIN(0, 1);
10574 IEM_MC_LOCAL(uint64_t, u64Value);
10575 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10576 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
10577 IEM_MC_ADVANCE_RIP();
10578 IEM_MC_END();
10579 break;
10580 }
10581 }
10582 else
10583 {
10584 /*
10585 * We're loading a register from memory.
10586 */
10587 switch (pVCpu->iem.s.enmEffOpSize)
10588 {
10589 case IEMMODE_16BIT:
10590 IEM_MC_BEGIN(0, 2);
10591 IEM_MC_LOCAL(uint16_t, u16Value);
10592 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10593 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10595 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10596 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
10597 IEM_MC_ADVANCE_RIP();
10598 IEM_MC_END();
10599 break;
10600
10601 case IEMMODE_32BIT:
10602 IEM_MC_BEGIN(0, 2);
10603 IEM_MC_LOCAL(uint32_t, u32Value);
10604 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10605 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10607 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10608 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
10609 IEM_MC_ADVANCE_RIP();
10610 IEM_MC_END();
10611 break;
10612
10613 case IEMMODE_64BIT:
10614 IEM_MC_BEGIN(0, 2);
10615 IEM_MC_LOCAL(uint64_t, u64Value);
10616 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10617 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10618 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10619 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10620 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
10621 IEM_MC_ADVANCE_RIP();
10622 IEM_MC_END();
10623 break;
10624 }
10625 }
10626 return VINF_SUCCESS;
10627}
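
/* Illustrative decoding of the memory form above: 8b 03 is
   "mov eax, [ebx]" (IEMMODE_32BIT), while 48 8b 03 takes the
   IEMMODE_64BIT path because REX.W promotes the effective operand size,
   giving "mov rax, [rbx]". */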
10628
10629
10630/** Opcode 0x63. */
10631FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
10632{
10633 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
10634 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
10635 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10636 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
10637 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
10638}
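
/* Illustrative dispatch for opcode 0x63: outside long mode 63 c8 decodes
   as "arpl ax, cx"; in 64-bit mode 48 63 c1 is "movsxd rax, ecx", and
   63 c1 without REX.W degenerates to a plain 32-bit "mov eax, ecx" via
   iemOp_mov_Gv_Ev above. */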
10639
10640
10641/** Opcode 0x8c. */
10642FNIEMOP_DEF(iemOp_mov_Ev_Sw)
10643{
10644 IEMOP_MNEMONIC("mov Ev,Sw");
10645
10646 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10647
10648 /*
10649 * Check that the destination register exists. The REX.R prefix is ignored.
10650 */
10651 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
10652 if (iSegReg > X86_SREG_GS)
10653 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10654
10655 /*
10656 * If rm is denoting a register, no more instruction bytes.
10657 * In that case, the operand size is respected and the upper bits are
10658 * cleared (starting with some Pentium models).
10659 */
10660 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10661 {
10662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10663 switch (pVCpu->iem.s.enmEffOpSize)
10664 {
10665 case IEMMODE_16BIT:
10666 IEM_MC_BEGIN(0, 1);
10667 IEM_MC_LOCAL(uint16_t, u16Value);
10668 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
10669 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
10670 IEM_MC_ADVANCE_RIP();
10671 IEM_MC_END();
10672 break;
10673
10674 case IEMMODE_32BIT:
10675 IEM_MC_BEGIN(0, 1);
10676 IEM_MC_LOCAL(uint32_t, u32Value);
10677 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
10678 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
10679 IEM_MC_ADVANCE_RIP();
10680 IEM_MC_END();
10681 break;
10682
10683 case IEMMODE_64BIT:
10684 IEM_MC_BEGIN(0, 1);
10685 IEM_MC_LOCAL(uint64_t, u64Value);
10686 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
10687 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
10688 IEM_MC_ADVANCE_RIP();
10689 IEM_MC_END();
10690 break;
10691 }
10692 }
10693 else
10694 {
10695 /*
10696 * We're saving the register to memory. The access is word sized
10697 * regardless of operand size prefixes.
10698 */
10699#if 0 /* not necessary */
10700 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
10701#endif
10702 IEM_MC_BEGIN(0, 2);
10703 IEM_MC_LOCAL(uint16_t, u16Value);
10704 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10705 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10707 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
10708 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
10709 IEM_MC_ADVANCE_RIP();
10710 IEM_MC_END();
10711 }
10712 return VINF_SUCCESS;
10713}
10714
10715
10716
10717
10718/** Opcode 0x8d. */
10719FNIEMOP_DEF(iemOp_lea_Gv_M)
10720{
10721 IEMOP_MNEMONIC("lea Gv,M");
10722 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10723 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10724 return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */
10725
10726 switch (pVCpu->iem.s.enmEffOpSize)
10727 {
10728 case IEMMODE_16BIT:
10729 IEM_MC_BEGIN(0, 2);
10730 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10731 IEM_MC_LOCAL(uint16_t, u16Cast);
10732 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10734 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
10735 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
10736 IEM_MC_ADVANCE_RIP();
10737 IEM_MC_END();
10738 return VINF_SUCCESS;
10739
10740 case IEMMODE_32BIT:
10741 IEM_MC_BEGIN(0, 2);
10742 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10743 IEM_MC_LOCAL(uint32_t, u32Cast);
10744 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10746 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
10747 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
10748 IEM_MC_ADVANCE_RIP();
10749 IEM_MC_END();
10750 return VINF_SUCCESS;
10751
10752 case IEMMODE_64BIT:
10753 IEM_MC_BEGIN(0, 1);
10754 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10755 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10757 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
10758 IEM_MC_ADVANCE_RIP();
10759 IEM_MC_END();
10760 return VINF_SUCCESS;
10761 }
10762 AssertFailedReturn(VERR_IEM_IPE_7);
10763}
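
/* Illustrative truncation in the 16-bit case above: 66 8d 43 7f in 32-bit
   code ("lea ax, [ebx+0x7f]") computes the full 32-bit effective address,
   and IEM_MC_ASSIGN_TO_SMALLER keeps only its low 16 bits for AX. */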
10764
10765
10766/** Opcode 0x8e. */
10767FNIEMOP_DEF(iemOp_mov_Sw_Ev)
10768{
10769 IEMOP_MNEMONIC("mov Sw,Ev");
10770
10771 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10772
10773 /*
10774 * The practical operand size is 16-bit.
10775 */
10776#if 0 /* not necessary */
10777 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
10778#endif
10779
10780 /*
10781 * Check that the destination register exists and can be used with this
10782 * instruction. The REX.R prefix is ignored.
10783 */
10784 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
10785 if ( iSegReg == X86_SREG_CS
10786 || iSegReg > X86_SREG_GS)
10787 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10788
10789 /*
10790 * If rm is denoting a register, no more instruction bytes.
10791 */
10792 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10793 {
10794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10795 IEM_MC_BEGIN(2, 0);
10796 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
10797 IEM_MC_ARG(uint16_t, u16Value, 1);
10798 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10799 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
10800 IEM_MC_END();
10801 }
10802 else
10803 {
10804 /*
10805 * We're loading the register from memory. The access is word sized
10806 * regardless of operand size prefixes.
10807 */
10808 IEM_MC_BEGIN(2, 1);
10809 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
10810 IEM_MC_ARG(uint16_t, u16Value, 1);
10811 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10812 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10813 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10814 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10815 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
10816 IEM_MC_END();
10817 }
10818 return VINF_SUCCESS;
10819}
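
/* Note (illustrative): a "mov ss, ..." additionally blocks interrupts and
   most exceptions until the end of the next instruction; that architectural
   quirk is presumably dealt with inside iemCImpl_load_SReg rather than in
   the decoding above. */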
10820
10821
10822/** Opcode 0x8f /0. */
10823FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
10824{
10825 /* This bugger is rather annoying as it requires rSP to be updated before
10826 doing the effective address calculations. Will eventually require a
10827 split between the R/M+SIB decoding and the effective address
10828 calculation - which is something that is required for any attempt at
10829 reusing this code for a recompiler. It may also be good to have if we
10830 need to delay the #UD exception caused by invalid lock prefixes.
10831
10832 For now, we'll do a mostly safe interpreter-only implementation here. */
10833 /** @todo What's the deal with the 'reg' field and pop Ev? Ignoring it for
10834 * now until tests show it's checked. */
10835 IEMOP_MNEMONIC("pop Ev");
10836
10837 /* Register access is relatively easy and can share code. */
10838 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10839 return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10840
10841 /*
10842 * Memory target.
10843 *
10844 * Intel says that RSP is incremented before it's used in any effective
10845 * address calculations. This means some serious extra annoyance here since
10846 * we decode and calculate the effective address in one step and like to
10847 * delay committing registers till everything is done.
10848 *
10849 * So, we'll decode and calculate the effective address twice. This will
10850 * require some recoding if turned into a recompiler.
10851 */
10852 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */
10853
10854#ifndef TST_IEM_CHECK_MC
10855 /* Calc effective address with modified ESP. */
10856/** @todo testcase */
10857 PCPUMCTX pCtx = IEM_GET_CTX(pVCpu);
10858 RTGCPTR GCPtrEff;
10859 VBOXSTRICTRC rcStrict;
10860 switch (pVCpu->iem.s.enmEffOpSize)
10861 {
10862 case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
10863 case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
10864 case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
10865 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10866 }
10867 if (rcStrict != VINF_SUCCESS)
10868 return rcStrict;
10869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10870
10871 /* Perform the operation - this should be CImpl. */
10872 RTUINT64U TmpRsp;
10873 TmpRsp.u = pCtx->rsp;
10874 switch (pVCpu->iem.s.enmEffOpSize)
10875 {
10876 case IEMMODE_16BIT:
10877 {
10878 uint16_t u16Value;
10879 rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
10880 if (rcStrict == VINF_SUCCESS)
10881 rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
10882 break;
10883 }
10884
10885 case IEMMODE_32BIT:
10886 {
10887 uint32_t u32Value;
10888 rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
10889 if (rcStrict == VINF_SUCCESS)
10890 rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
10891 break;
10892 }
10893
10894 case IEMMODE_64BIT:
10895 {
10896 uint64_t u64Value;
10897 rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
10898 if (rcStrict == VINF_SUCCESS)
10899 rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
10900 break;
10901 }
10902
10903 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10904 }
10905 if (rcStrict == VINF_SUCCESS)
10906 {
10907 pCtx->rsp = TmpRsp.u;
10908 iemRegUpdateRipAndClearRF(pVCpu);
10909 }
10910 return rcStrict;
10911
10912#else
10913 return VERR_IEM_IPE_2;
10914#endif
10915}
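
/* Illustrative consequence of the Intel rule above: 8f 04 24 in 64-bit
   mode ("pop qword [rsp]") calculates the effective address with the
   already incremented RSP, so the value ends up one slot above the stack
   location it was popped from. */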
10916
10917
10918/** Opcode 0x8f. */
10919FNIEMOP_DEF(iemOp_Grp1A)
10920{
10921 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10922 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
10923 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
10924
10925 /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
10926 /** @todo XOP decoding. */
10927 IEMOP_MNEMONIC("3-byte-xop");
10928 return IEMOP_RAISE_INVALID_OPCODE();
10929}
10930
10931
10932/**
10933 * Common 'xchg reg,rAX' helper.
10934 */
10935FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
10936{
10937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10938
10939 iReg |= pVCpu->iem.s.uRexB;
10940 switch (pVCpu->iem.s.enmEffOpSize)
10941 {
10942 case IEMMODE_16BIT:
10943 IEM_MC_BEGIN(0, 2);
10944 IEM_MC_LOCAL(uint16_t, u16Tmp1);
10945 IEM_MC_LOCAL(uint16_t, u16Tmp2);
10946 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
10947 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
10948 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
10949 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
10950 IEM_MC_ADVANCE_RIP();
10951 IEM_MC_END();
10952 return VINF_SUCCESS;
10953
10954 case IEMMODE_32BIT:
10955 IEM_MC_BEGIN(0, 2);
10956 IEM_MC_LOCAL(uint32_t, u32Tmp1);
10957 IEM_MC_LOCAL(uint32_t, u32Tmp2);
10958 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
10959 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
10960 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
10961 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
10962 IEM_MC_ADVANCE_RIP();
10963 IEM_MC_END();
10964 return VINF_SUCCESS;
10965
10966 case IEMMODE_64BIT:
10967 IEM_MC_BEGIN(0, 2);
10968 IEM_MC_LOCAL(uint64_t, u64Tmp1);
10969 IEM_MC_LOCAL(uint64_t, u64Tmp2);
10970 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
10971 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
10972 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
10973 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
10974 IEM_MC_ADVANCE_RIP();
10975 IEM_MC_END();
10976 return VINF_SUCCESS;
10977
10978 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10979 }
10980}
10981
10982
10983/** Opcode 0x90. */
10984FNIEMOP_DEF(iemOp_nop)
10985{
10986 /* R8/R8D and RAX/EAX can be exchanged. */
10987 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
10988 {
10989 IEMOP_MNEMONIC("xchg r8,rAX");
10990 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
10991 }
10992
10993 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
10994 IEMOP_MNEMONIC("pause");
10995 else
10996 IEMOP_MNEMONIC("nop");
10997 IEM_MC_BEGIN(0, 0);
10998 IEM_MC_ADVANCE_RIP();
10999 IEM_MC_END();
11000 return VINF_SUCCESS;
11001}
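
/* Illustrative encodings for the cases above: plain 90 is "nop", f3 90 is
   "pause", and 41 90 (REX.B + 0x90) really exchanges R8 and RAX and thus
   must not be treated as a no-operation. */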
11002
11003
11004/** Opcode 0x91. */
11005FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
11006{
11007 IEMOP_MNEMONIC("xchg rCX,rAX");
11008 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
11009}
11010
11011
11012/** Opcode 0x92. */
11013FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
11014{
11015 IEMOP_MNEMONIC("xchg rDX,rAX");
11016 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
11017}
11018
11019
11020/** Opcode 0x93. */
11021FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
11022{
11023 IEMOP_MNEMONIC("xchg rBX,rAX");
11024 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
11025}
11026
11027
11028/** Opcode 0x94. */
11029FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
11030{
11031 IEMOP_MNEMONIC("xchg rSP,rAX");
11032 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
11033}
11034
11035
11036/** Opcode 0x95. */
11037FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
11038{
11039 IEMOP_MNEMONIC("xchg rBP,rAX");
11040 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
11041}
11042
11043
11044/** Opcode 0x96. */
11045FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
11046{
11047 IEMOP_MNEMONIC("xchg rSI,rAX");
11048 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
11049}
11050
11051
11052/** Opcode 0x97. */
11053FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
11054{
11055 IEMOP_MNEMONIC("xchg rDI,rAX");
11056 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
11057}
11058
11059
11060/** Opcode 0x98. */
11061FNIEMOP_DEF(iemOp_cbw)
11062{
11063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11064 switch (pVCpu->iem.s.enmEffOpSize)
11065 {
11066 case IEMMODE_16BIT:
11067 IEMOP_MNEMONIC("cbw");
11068 IEM_MC_BEGIN(0, 1);
11069 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
11070 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
11071 } IEM_MC_ELSE() {
11072 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
11073 } IEM_MC_ENDIF();
11074 IEM_MC_ADVANCE_RIP();
11075 IEM_MC_END();
11076 return VINF_SUCCESS;
11077
11078 case IEMMODE_32BIT:
11079 IEMOP_MNEMONIC("cwde");
11080 IEM_MC_BEGIN(0, 1);
11081 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
11082 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
11083 } IEM_MC_ELSE() {
11084 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
11085 } IEM_MC_ENDIF();
11086 IEM_MC_ADVANCE_RIP();
11087 IEM_MC_END();
11088 return VINF_SUCCESS;
11089
11090 case IEMMODE_64BIT:
11091 IEMOP_MNEMONIC("cdqe");
11092 IEM_MC_BEGIN(0, 1);
11093 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
11094 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
11095 } IEM_MC_ELSE() {
11096 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
11097 } IEM_MC_ENDIF();
11098 IEM_MC_ADVANCE_RIP();
11099 IEM_MC_END();
11100 return VINF_SUCCESS;
11101
11102 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11103 }
11104}
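
/* Worked examples for the sign extension above: cbw with AL=0x80 takes the
   OR path and yields AX=0xff80, while AL=0x7f takes the AND path and yields
   AX=0x007f; cwde and cdqe do the same keyed on bits 15 and 31. */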
11105
11106
11107/** Opcode 0x99. */
11108FNIEMOP_DEF(iemOp_cwd)
11109{
11110 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11111 switch (pVCpu->iem.s.enmEffOpSize)
11112 {
11113 case IEMMODE_16BIT:
11114 IEMOP_MNEMONIC("cwd");
11115 IEM_MC_BEGIN(0, 1);
11116 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
11117 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
11118 } IEM_MC_ELSE() {
11119 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
11120 } IEM_MC_ENDIF();
11121 IEM_MC_ADVANCE_RIP();
11122 IEM_MC_END();
11123 return VINF_SUCCESS;
11124
11125 case IEMMODE_32BIT:
11126 IEMOP_MNEMONIC("cdq");
11127 IEM_MC_BEGIN(0, 1);
11128 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
11129 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
11130 } IEM_MC_ELSE() {
11131 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
11132 } IEM_MC_ENDIF();
11133 IEM_MC_ADVANCE_RIP();
11134 IEM_MC_END();
11135 return VINF_SUCCESS;
11136
11137 case IEMMODE_64BIT:
11138 IEMOP_MNEMONIC("cqo");
11139 IEM_MC_BEGIN(0, 1);
11140 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
11141 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
11142 } IEM_MC_ELSE() {
11143 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
11144 } IEM_MC_ENDIF();
11145 IEM_MC_ADVANCE_RIP();
11146 IEM_MC_END();
11147 return VINF_SUCCESS;
11148
11149 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11150 }
11151}
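
/* Worked examples for the widening above: cwd with AX=0x8000 sets
   DX=0xffff, cdq with EAX=0x7fffffff clears EDX, and cqo fills RDX with
   copies of bit 63 of RAX. */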
11152
11153
11154/** Opcode 0x9a. */
11155FNIEMOP_DEF(iemOp_call_Ap)
11156{
11157 IEMOP_MNEMONIC("call Ap");
11158 IEMOP_HLP_NO_64BIT();
11159
11160 /* Decode the far pointer address and pass it on to the far call C implementation. */
11161 uint32_t offSeg;
11162 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
11163 IEM_OPCODE_GET_NEXT_U32(&offSeg);
11164 else
11165 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
11166 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
11167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11168 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
11169}
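
/* Illustrative operand layout for the decoding above: with a 32-bit
   operand size, 9a 78 56 34 12 cd ab is "call 0xabcd:0x12345678"; the
   offset is fetched first from the instruction stream, the selector last. */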
11170
11171
11172/** Opcode 0x9b. (aka fwait) */
11173FNIEMOP_DEF(iemOp_wait)
11174{
11175 IEMOP_MNEMONIC("wait");
11176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11177
11178 IEM_MC_BEGIN(0, 0);
11179 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11180 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11181 IEM_MC_ADVANCE_RIP();
11182 IEM_MC_END();
11183 return VINF_SUCCESS;
11184}
11185
11186
11187/** Opcode 0x9c. */
11188FNIEMOP_DEF(iemOp_pushf_Fv)
11189{
11190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11191 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11192 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
11193}
11194
11195
11196/** Opcode 0x9d. */
11197FNIEMOP_DEF(iemOp_popf_Fv)
11198{
11199 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11200 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11201 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
11202}
11203
11204
11205/** Opcode 0x9e. */
11206FNIEMOP_DEF(iemOp_sahf)
11207{
11208 IEMOP_MNEMONIC("sahf");
11209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11210 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
11211 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
11212 return IEMOP_RAISE_INVALID_OPCODE();
11213 IEM_MC_BEGIN(0, 2);
11214 IEM_MC_LOCAL(uint32_t, u32Flags);
11215 IEM_MC_LOCAL(uint32_t, EFlags);
11216 IEM_MC_FETCH_EFLAGS(EFlags);
11217 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
11218 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11219 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
11220 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
11221 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
11222 IEM_MC_COMMIT_EFLAGS(EFlags);
11223 IEM_MC_ADVANCE_RIP();
11224 IEM_MC_END();
11225 return VINF_SUCCESS;
11226}
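
/* Worked example for the masking above: sahf with AH=0xd5 sets SF, ZF, AF,
   PF and CF (bits 7, 6, 4, 2 and 0 of AH), and X86_EFL_1 forces the
   always-one bit 1 of EFLAGS regardless of AH. */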
11227
11228
11229/** Opcode 0x9f. */
11230FNIEMOP_DEF(iemOp_lahf)
11231{
11232 IEMOP_MNEMONIC("lahf");
11233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11234 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
11235 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
11236 return IEMOP_RAISE_INVALID_OPCODE();
11237 IEM_MC_BEGIN(0, 1);
11238 IEM_MC_LOCAL(uint8_t, u8Flags);
11239 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
11240 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
11241 IEM_MC_ADVANCE_RIP();
11242 IEM_MC_END();
11243 return VINF_SUCCESS;
11244}
11245
11246
11247/**
11248 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
11249 * iemOp_mov_Ov_rAX to fetch the moffsXX part of the opcode and fend off lock
11250 * prefixes. Will return on failures.
11251 * @param a_GCPtrMemOff The variable to store the offset in.
11252 */
11253#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
11254 do \
11255 { \
11256 switch (pVCpu->iem.s.enmEffAddrMode) \
11257 { \
11258 case IEMMODE_16BIT: \
11259 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
11260 break; \
11261 case IEMMODE_32BIT: \
11262 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
11263 break; \
11264 case IEMMODE_64BIT: \
11265 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
11266 break; \
11267 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11268 } \
11269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11270 } while (0)
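
/* Illustrative moffs sizes for the macro above: in 64-bit mode a0..a3 take
   an 8-byte offset, a 67 address-size prefix shrinks it to 4 bytes, and
   plain 16-bit code uses a 2-byte offset. */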
11271
11272/** Opcode 0xa0. */
11273FNIEMOP_DEF(iemOp_mov_Al_Ob)
11274{
11275 /*
11276 * Get the offset and fend off lock prefixes.
11277 */
11278 RTGCPTR GCPtrMemOff;
11279 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11280
11281 /*
11282 * Fetch AL.
11283 */
11284 IEM_MC_BEGIN(0, 1);
11285 IEM_MC_LOCAL(uint8_t, u8Tmp);
11286 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
11287 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
11288 IEM_MC_ADVANCE_RIP();
11289 IEM_MC_END();
11290 return VINF_SUCCESS;
11291}
11292
11293
11294/** Opcode 0xa1. */
11295FNIEMOP_DEF(iemOp_mov_rAX_Ov)
11296{
11297 /*
11298 * Get the offset and fend off lock prefixes.
11299 */
11300 IEMOP_MNEMONIC("mov rAX,Ov");
11301 RTGCPTR GCPtrMemOff;
11302 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11303
11304 /*
11305 * Fetch rAX.
11306 */
11307 switch (pVCpu->iem.s.enmEffOpSize)
11308 {
11309 case IEMMODE_16BIT:
11310 IEM_MC_BEGIN(0, 1);
11311 IEM_MC_LOCAL(uint16_t, u16Tmp);
11312 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
11313 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
11314 IEM_MC_ADVANCE_RIP();
11315 IEM_MC_END();
11316 return VINF_SUCCESS;
11317
11318 case IEMMODE_32BIT:
11319 IEM_MC_BEGIN(0, 1);
11320 IEM_MC_LOCAL(uint32_t, u32Tmp);
11321 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
11322 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
11323 IEM_MC_ADVANCE_RIP();
11324 IEM_MC_END();
11325 return VINF_SUCCESS;
11326
11327 case IEMMODE_64BIT:
11328 IEM_MC_BEGIN(0, 1);
11329 IEM_MC_LOCAL(uint64_t, u64Tmp);
11330 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
11331 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
11332 IEM_MC_ADVANCE_RIP();
11333 IEM_MC_END();
11334 return VINF_SUCCESS;
11335
11336 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11337 }
11338}
11339
11340
11341/** Opcode 0xa2. */
11342FNIEMOP_DEF(iemOp_mov_Ob_AL)
11343{
11344 /*
11345 * Get the offset and fend off lock prefixes.
11346 */
11347 RTGCPTR GCPtrMemOff;
11348 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11349
11350 /*
11351 * Store AL.
11352 */
11353 IEM_MC_BEGIN(0, 1);
11354 IEM_MC_LOCAL(uint8_t, u8Tmp);
11355 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
11356 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
11357 IEM_MC_ADVANCE_RIP();
11358 IEM_MC_END();
11359 return VINF_SUCCESS;
11360}
11361
11362
11363/** Opcode 0xa3. */
11364FNIEMOP_DEF(iemOp_mov_Ov_rAX)
11365{
11366 /*
11367 * Get the offset and fend off lock prefixes.
11368 */
11369 RTGCPTR GCPtrMemOff;
11370 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11371
11372 /*
11373 * Store rAX.
11374 */
11375 switch (pVCpu->iem.s.enmEffOpSize)
11376 {
11377 case IEMMODE_16BIT:
11378 IEM_MC_BEGIN(0, 1);
11379 IEM_MC_LOCAL(uint16_t, u16Tmp);
11380 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
11381 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
11382 IEM_MC_ADVANCE_RIP();
11383 IEM_MC_END();
11384 return VINF_SUCCESS;
11385
11386 case IEMMODE_32BIT:
11387 IEM_MC_BEGIN(0, 1);
11388 IEM_MC_LOCAL(uint32_t, u32Tmp);
11389 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
11390 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
11391 IEM_MC_ADVANCE_RIP();
11392 IEM_MC_END();
11393 return VINF_SUCCESS;
11394
11395 case IEMMODE_64BIT:
11396 IEM_MC_BEGIN(0, 1);
11397 IEM_MC_LOCAL(uint64_t, u64Tmp);
11398 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
11399 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
11400 IEM_MC_ADVANCE_RIP();
11401 IEM_MC_END();
11402 return VINF_SUCCESS;
11403
11404 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11405 }
11406}
11407
11408/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv */
11409#define IEM_MOVS_CASE(ValBits, AddrBits) \
11410 IEM_MC_BEGIN(0, 2); \
11411 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
11412 IEM_MC_LOCAL(RTGCPTR, uAddr); \
11413 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
11414 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
11415 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
11416 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
11417 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
11418 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11419 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11420 } IEM_MC_ELSE() { \
11421 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11422 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11423 } IEM_MC_ENDIF(); \
11424 IEM_MC_ADVANCE_RIP(); \
11425 IEM_MC_END();
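
/* Illustrative expansion of the macro above: IEM_MOVS_CASE(8, 16) loads a
   byte from iEffSeg:SI (DS unless overridden), stores it to ES:DI, and then
   steps both SI and DI by one byte, downwards when EFLAGS.DF is set and
   upwards otherwise. */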
11426
11427/** Opcode 0xa4. */
11428FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
11429{
11430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11431
11432 /*
11433 * Use the C implementation if a repeat prefix is encountered.
11434 */
11435 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11436 {
11437 IEMOP_MNEMONIC("rep movsb Xb,Yb");
11438 switch (pVCpu->iem.s.enmEffAddrMode)
11439 {
11440 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
11441 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
11442 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
11443 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11444 }
11445 }
11446 IEMOP_MNEMONIC("movsb Xb,Yb");
11447
11448 /*
11449 * Sharing case implementation with movs[wdq] below.
11450 */
11451 switch (pVCpu->iem.s.enmEffAddrMode)
11452 {
11453 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
11454 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
11455 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
11456 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11457 }
11458 return VINF_SUCCESS;
11459}
11460
11461
11462/** Opcode 0xa5. */
11463FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
11464{
11465 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11466
11467 /*
11468 * Use the C implementation if a repeat prefix is encountered.
11469 */
11470 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11471 {
11472 IEMOP_MNEMONIC("rep movs Xv,Yv");
11473 switch (pVCpu->iem.s.enmEffOpSize)
11474 {
11475 case IEMMODE_16BIT:
11476 switch (pVCpu->iem.s.enmEffAddrMode)
11477 {
11478 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
11479 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
11480 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
11481 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11482 }
11483 break;
11484 case IEMMODE_32BIT:
11485 switch (pVCpu->iem.s.enmEffAddrMode)
11486 {
11487 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
11488 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
11489 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
11490 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11491 }
11492 case IEMMODE_64BIT:
11493 switch (pVCpu->iem.s.enmEffAddrMode)
11494 {
11495 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
11496 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
11497 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
11498 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11499 }
11500 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11501 }
11502 }
11503 IEMOP_MNEMONIC("movs Xv,Yv");
11504
11505 /*
11506 * Annoying double switch here.
11507 * Using ugly macro for implementing the cases, sharing it with movsb.
11508 */
11509 switch (pVCpu->iem.s.enmEffOpSize)
11510 {
11511 case IEMMODE_16BIT:
11512 switch (pVCpu->iem.s.enmEffAddrMode)
11513 {
11514 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
11515 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
11516 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
11517 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11518 }
11519 break;
11520
11521 case IEMMODE_32BIT:
11522 switch (pVCpu->iem.s.enmEffAddrMode)
11523 {
11524 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
11525 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
11526 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
11527 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11528 }
11529 break;
11530
11531 case IEMMODE_64BIT:
11532 switch (pVCpu->iem.s.enmEffAddrMode)
11533 {
11534 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11535 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
11536 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
11537 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11538 }
11539 break;
11540 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11541 }
11542 return VINF_SUCCESS;
11543}
11544
11545#undef IEM_MOVS_CASE
11546
11547/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv */
11548#define IEM_CMPS_CASE(ValBits, AddrBits) \
11549 IEM_MC_BEGIN(3, 3); \
11550 IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
11551 IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
11552 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11553 IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
11554 IEM_MC_LOCAL(RTGCPTR, uAddr); \
11555 \
11556 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
11557 IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
11558 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
11559 IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
11560 IEM_MC_REF_LOCAL(puValue1, uValue1); \
11561 IEM_MC_REF_EFLAGS(pEFlags); \
11562 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
11563 \
11564 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
11565 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11566 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11567 } IEM_MC_ELSE() { \
11568 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11569 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11570 } IEM_MC_ENDIF(); \
11571 IEM_MC_ADVANCE_RIP(); \
11572 IEM_MC_END(); \
11573
11574/** Opcode 0xa6. */
11575FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
11576{
11577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11578
11579 /*
11580 * Use the C implementation if a repeat prefix is encountered.
11581 */
11582 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
11583 {
11584 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11585 switch (pVCpu->iem.s.enmEffAddrMode)
11586 {
11587 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
11588 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
11589 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
11590 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11591 }
11592 }
11593 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
11594 {
11595 IEMOP_MNEMONIC("repne cmps Xb,Yb");
11596 switch (pVCpu->iem.s.enmEffAddrMode)
11597 {
11598 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
11599 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
11600 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
11601 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11602 }
11603 }
11604 IEMOP_MNEMONIC("cmps Xb,Yb");
11605
11606 /*
11607 * Sharing case implementation with cmps[wdq] below.
11608 */
11609 switch (pVCpu->iem.s.enmEffAddrMode)
11610 {
11611 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
11612 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
11613 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
11614 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11615 }
11616 return VINF_SUCCESS;
11617
11618}
11619
11620
11621/** Opcode 0xa7. */
11622FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
11623{
11624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11625
11626 /*
11627 * Use the C implementation if a repeat prefix is encountered.
11628 */
11629 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
11630 {
11631 IEMOP_MNEMONIC("repe cmps Xv,Yv");
11632 switch (pVCpu->iem.s.enmEffOpSize)
11633 {
11634 case IEMMODE_16BIT:
11635 switch (pVCpu->iem.s.enmEffAddrMode)
11636 {
11637 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
11638 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
11639 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
11640 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11641 }
11642 break;
11643 case IEMMODE_32BIT:
11644 switch (pVCpu->iem.s.enmEffAddrMode)
11645 {
11646 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
11647 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
11648 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
11649 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11650 }
11651 case IEMMODE_64BIT:
11652 switch (pVCpu->iem.s.enmEffAddrMode)
11653 {
11654 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
11655 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
11656 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
11657 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11658 }
11659 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11660 }
11661 }
11662
11663 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
11664 {
11665 IEMOP_MNEMONIC("repne cmps Xv,Yv");
11666 switch (pVCpu->iem.s.enmEffOpSize)
11667 {
11668 case IEMMODE_16BIT:
11669 switch (pVCpu->iem.s.enmEffAddrMode)
11670 {
11671 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
11672 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
11673 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
11674 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11675 }
11676 break;
11677 case IEMMODE_32BIT:
11678 switch (pVCpu->iem.s.enmEffAddrMode)
11679 {
11680 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
11681 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
11682 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
11683 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11684 }
11685 case IEMMODE_64BIT:
11686 switch (pVCpu->iem.s.enmEffAddrMode)
11687 {
11688 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
11689 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
11690 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
11691 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11692 }
11693 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11694 }
11695 }
11696
11697 IEMOP_MNEMONIC("cmps Xv,Yv");
11698
11699 /*
11700 * Annoying double switch here.
11701 * Using ugly macro for implementing the cases, sharing it with cmpsb.
11702 */
11703 switch (pVCpu->iem.s.enmEffOpSize)
11704 {
11705 case IEMMODE_16BIT:
11706 switch (pVCpu->iem.s.enmEffAddrMode)
11707 {
11708 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
11709 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
11710 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
11711 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11712 }
11713 break;
11714
11715 case IEMMODE_32BIT:
11716 switch (pVCpu->iem.s.enmEffAddrMode)
11717 {
11718 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
11719 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
11720 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
11721 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11722 }
11723 break;
11724
11725 case IEMMODE_64BIT:
11726 switch (pVCpu->iem.s.enmEffAddrMode)
11727 {
11728 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11729 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
11730 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
11731 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11732 }
11733 break;
11734 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11735 }
11736 return VINF_SUCCESS;
11737
11738}
11739
11740#undef IEM_CMPS_CASE
11741
11742/** Opcode 0xa8. */
11743FNIEMOP_DEF(iemOp_test_AL_Ib)
11744{
11745 IEMOP_MNEMONIC("test al,Ib");
11746 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
11747 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
11748}
11749
11750
11751/** Opcode 0xa9. */
11752FNIEMOP_DEF(iemOp_test_eAX_Iz)
11753{
11754 IEMOP_MNEMONIC("test rAX,Iz");
11755 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
11756 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
11757}
11758
11759
11760/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX */
11761#define IEM_STOS_CASE(ValBits, AddrBits) \
11762 IEM_MC_BEGIN(0, 2); \
11763 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
11764 IEM_MC_LOCAL(RTGCPTR, uAddr); \
11765 IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
11766 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
11767 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
11768 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
11769 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11770 } IEM_MC_ELSE() { \
11771 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11772 } IEM_MC_ENDIF(); \
11773 IEM_MC_ADVANCE_RIP(); \
11774 IEM_MC_END(); \
11775
11776/** Opcode 0xaa. */
11777FNIEMOP_DEF(iemOp_stosb_Yb_AL)
11778{
11779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11780
11781 /*
11782 * Use the C implementation if a repeat prefix is encountered.
11783 */
11784 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11785 {
11786 IEMOP_MNEMONIC("rep stos Yb,al");
11787 switch (pVCpu->iem.s.enmEffAddrMode)
11788 {
11789 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
11790 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
11791 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
11792 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11793 }
11794 }
11795 IEMOP_MNEMONIC("stos Yb,al");
11796
11797 /*
11798 * Sharing case implementation with stos[wdq] below.
11799 */
11800 switch (pVCpu->iem.s.enmEffAddrMode)
11801 {
11802 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
11803 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
11804 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
11805 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11806 }
11807 return VINF_SUCCESS;
11808}
11809
11810
11811/** Opcode 0xab. */
11812FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
11813{
11814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11815
11816 /*
11817 * Use the C implementation if a repeat prefix is encountered.
11818 */
11819 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11820 {
11821 IEMOP_MNEMONIC("rep stos Yv,rAX");
11822 switch (pVCpu->iem.s.enmEffOpSize)
11823 {
11824 case IEMMODE_16BIT:
11825 switch (pVCpu->iem.s.enmEffAddrMode)
11826 {
11827 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
11828 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
11829 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
11830 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11831 }
11832 break;
11833 case IEMMODE_32BIT:
11834 switch (pVCpu->iem.s.enmEffAddrMode)
11835 {
11836 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
11837 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
11838 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
11839 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11840 }
11841 case IEMMODE_64BIT:
11842 switch (pVCpu->iem.s.enmEffAddrMode)
11843 {
11844 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
11845 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
11846 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
11847 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11848 }
11849 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11850 }
11851 }
11852 IEMOP_MNEMONIC("stos Yv,rAX");
11853
11854 /*
11855 * Annoying double switch here.
11856 * Using ugly macro for implementing the cases, sharing it with stosb.
11857 */
11858 switch (pVCpu->iem.s.enmEffOpSize)
11859 {
11860 case IEMMODE_16BIT:
11861 switch (pVCpu->iem.s.enmEffAddrMode)
11862 {
11863 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
11864 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
11865 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
11866 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11867 }
11868 break;
11869
11870 case IEMMODE_32BIT:
11871 switch (pVCpu->iem.s.enmEffAddrMode)
11872 {
11873 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
11874 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
11875 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
11876 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11877 }
11878 break;
11879
11880 case IEMMODE_64BIT:
11881 switch (pVCpu->iem.s.enmEffAddrMode)
11882 {
11883 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11884 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
11885 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
11886 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11887 }
11888 break;
11889 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11890 }
11891 return VINF_SUCCESS;
11892}
11893
11894#undef IEM_STOS_CASE
11895
11896/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv */
11897#define IEM_LODS_CASE(ValBits, AddrBits) \
11898 IEM_MC_BEGIN(0, 2); \
11899 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
11900 IEM_MC_LOCAL(RTGCPTR, uAddr); \
11901 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
11902 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
11903 IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
11904 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
11905 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11906 } IEM_MC_ELSE() { \
11907 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11908 } IEM_MC_ENDIF(); \
11909 IEM_MC_ADVANCE_RIP(); \
11910 IEM_MC_END();
11911
11912/** Opcode 0xac. */
11913FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
11914{
11915 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11916
11917 /*
11918 * Use the C implementation if a repeat prefix is encountered.
11919 */
11920 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11921 {
11922 IEMOP_MNEMONIC("rep lodsb al,Xb");
11923 switch (pVCpu->iem.s.enmEffAddrMode)
11924 {
11925 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
11926 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
11927 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
11928 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11929 }
11930 }
11931 IEMOP_MNEMONIC("lodsb al,Xb");
11932
11933 /*
11934 * Sharing case implementation with lods[wdq] below.
11935 */
11936 switch (pVCpu->iem.s.enmEffAddrMode)
11937 {
11938 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
11939 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
11940 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
11941 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11942 }
11943 return VINF_SUCCESS;
11944}
11945
11946
11947/** Opcode 0xad. */
11948FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
11949{
11950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11951
11952 /*
11953 * Use the C implementation if a repeat prefix is encountered.
11954 */
11955 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11956 {
11957 IEMOP_MNEMONIC("rep lods rAX,Xv");
11958 switch (pVCpu->iem.s.enmEffOpSize)
11959 {
11960 case IEMMODE_16BIT:
11961 switch (pVCpu->iem.s.enmEffAddrMode)
11962 {
11963 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
11964 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
11965 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
11966 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11967 }
11968 break;
11969 case IEMMODE_32BIT:
11970 switch (pVCpu->iem.s.enmEffAddrMode)
11971 {
11972 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
11973 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
11974 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
11975 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11976 }
11977 case IEMMODE_64BIT:
11978 switch (pVCpu->iem.s.enmEffAddrMode)
11979 {
11980 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
11981 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
11982 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
11983 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11984 }
11985 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11986 }
11987 }
11988 IEMOP_MNEMONIC("lods rAX,Xv");
11989
11990 /*
11991 * Annoying double switch here.
11992 * Using ugly macro for implementing the cases, sharing it with lodsb.
11993 */
11994 switch (pVCpu->iem.s.enmEffOpSize)
11995 {
11996 case IEMMODE_16BIT:
11997 switch (pVCpu->iem.s.enmEffAddrMode)
11998 {
11999 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
12000 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
12001 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
12002 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12003 }
12004 break;
12005
12006 case IEMMODE_32BIT:
12007 switch (pVCpu->iem.s.enmEffAddrMode)
12008 {
12009 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
12010 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
12011 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
12012 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12013 }
12014 break;
12015
12016 case IEMMODE_64BIT:
12017 switch (pVCpu->iem.s.enmEffAddrMode)
12018 {
12019 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12020 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
12021 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
12022 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12023 }
12024 break;
12025 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12026 }
12027 return VINF_SUCCESS;
12028}
12029
12030#undef IEM_LODS_CASE
12031
12032/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv */
12033#define IEM_SCAS_CASE(ValBits, AddrBits) \
12034 IEM_MC_BEGIN(3, 2); \
12035 IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
12036 IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
12037 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
12038 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12039 \
12040 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12041 IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
12042 IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
12043 IEM_MC_REF_EFLAGS(pEFlags); \
12044 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
12045 \
12046 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12047 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12048 } IEM_MC_ELSE() { \
12049 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12050 } IEM_MC_ENDIF(); \
12051 IEM_MC_ADVANCE_RIP(); \
12052 IEM_MC_END();
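
/* Illustrative expansion of the macro above: IEM_SCAS_CASE(16, 32) compares
   AX with the word at ES:EDI through iemAImpl_cmp_u16, updating the
   arithmetic flags exactly like cmp, and then steps EDI by two in the
   direction given by EFLAGS.DF; the ES segment of scas cannot be
   overridden. */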
12053
12054/** Opcode 0xae. */
12055FNIEMOP_DEF(iemOp_scasb_AL_Xb)
12056{
12057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12058
12059 /*
12060 * Use the C implementation if a repeat prefix is encountered.
12061 */
12062 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12063 {
12064 IEMOP_MNEMONIC("repe scasb al,Xb");
12065 switch (pVCpu->iem.s.enmEffAddrMode)
12066 {
12067 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
12068 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
12069 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
12070 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12071 }
12072 }
12073 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12074 {
12075 IEMOP_MNEMONIC("repne scasb al,Xb");
12076 switch (pVCpu->iem.s.enmEffAddrMode)
12077 {
12078 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
12079 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
12080 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
12081 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12082 }
12083 }
12084 IEMOP_MNEMONIC("scasb al,Xb");
12085
12086 /*
12087 * Sharing case implementation with scas[wdq] below.
12088 */
12089 switch (pVCpu->iem.s.enmEffAddrMode)
12090 {
12091 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
12092 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
12093 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
12094 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12095 }
12096 return VINF_SUCCESS;
12097}
12098
12099
12100/** Opcode 0xaf. */
12101FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
12102{
12103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12104
12105 /*
12106 * Use the C implementation if a repeat prefix is encountered.
12107 */
12108 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12109 {
12110 IEMOP_MNEMONIC("repe scas rAX,Xv");
12111 switch (pVCpu->iem.s.enmEffOpSize)
12112 {
12113 case IEMMODE_16BIT:
12114 switch (pVCpu->iem.s.enmEffAddrMode)
12115 {
12116 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
12117 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
12118 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
12119 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12120 }
12121 break;
12122 case IEMMODE_32BIT:
12123 switch (pVCpu->iem.s.enmEffAddrMode)
12124 {
12125 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
12126 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
12127 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
12128 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12129 }
12130 case IEMMODE_64BIT:
12131 switch (pVCpu->iem.s.enmEffAddrMode)
12132 {
12133 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Is this wrong? 16-bit addressing cannot be encoded in 64-bit mode, though 32-bit can, right? */
12134 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
12135 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
12136 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12137 }
12138 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12139 }
12140 }
12141 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12142 {
12143 IEMOP_MNEMONIC("repne scas rAX,Xv");
12144 switch (pVCpu->iem.s.enmEffOpSize)
12145 {
12146 case IEMMODE_16BIT:
12147 switch (pVCpu->iem.s.enmEffAddrMode)
12148 {
12149 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
12150 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
12151 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
12152 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12153 }
12154 break;
12155 case IEMMODE_32BIT:
12156 switch (pVCpu->iem.s.enmEffAddrMode)
12157 {
12158 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
12159 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
12160 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
12161 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12162 }
12163 case IEMMODE_64BIT:
12164 switch (pVCpu->iem.s.enmEffAddrMode)
12165 {
12166 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
12167 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
12168 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
12169 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12170 }
12171 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12172 }
12173 }
12174 IEMOP_MNEMONIC("scas rAX,Xv");
12175
12176 /*
12177 * Annoying double switch here.
12178 * Using ugly macro for implementing the cases, sharing it with scasb.
12179 */
12180 switch (pVCpu->iem.s.enmEffOpSize)
12181 {
12182 case IEMMODE_16BIT:
12183 switch (pVCpu->iem.s.enmEffAddrMode)
12184 {
12185 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
12186 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
12187 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
12188 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12189 }
12190 break;
12191
12192 case IEMMODE_32BIT:
12193 switch (pVCpu->iem.s.enmEffAddrMode)
12194 {
12195 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
12196 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
12197 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
12198 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12199 }
12200 break;
12201
12202 case IEMMODE_64BIT:
12203 switch (pVCpu->iem.s.enmEffAddrMode)
12204 {
12205 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12206 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
12207 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
12208 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12209 }
12210 break;
12211 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12212 }
12213 return VINF_SUCCESS;
12214}
12215
12216#undef IEM_SCAS_CASE
12217
12218/**
12219 * Common 'mov r8, imm8' helper.
12220 */
12221FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
12222{
12223 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12225
12226 IEM_MC_BEGIN(0, 1);
12227 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
12228 IEM_MC_STORE_GREG_U8(iReg, u8Value);
12229 IEM_MC_ADVANCE_RIP();
12230 IEM_MC_END();
12231
12232 return VINF_SUCCESS;
12233}
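
/* Note on the register indices below: without REX, opcodes b4..b7 address
   AH/CH/DH/BH, which reuse the xSP/xBP/xSI/xDI encodings passed here; with
   any REX prefix the same indices select SPL/BPL/SIL/DIL instead, a
   distinction the byte sized IEM_MC_STORE_GREG_U8 accessors are expected to
   resolve from the prefix state. */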
12234
12235
12236/** Opcode 0xb0. */
12237FNIEMOP_DEF(iemOp_mov_AL_Ib)
12238{
12239 IEMOP_MNEMONIC("mov AL,Ib");
12240 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12241}
12242
12243
12244/** Opcode 0xb1. */
12245FNIEMOP_DEF(iemOp_CL_Ib)
12246{
12247 IEMOP_MNEMONIC("mov CL,Ib");
12248 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12249}
12250
12251
12252/** Opcode 0xb2. */
12253FNIEMOP_DEF(iemOp_DL_Ib)
12254{
12255 IEMOP_MNEMONIC("mov DL,Ib");
12256 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12257}
12258
12259
12260/** Opcode 0xb3. */
12261FNIEMOP_DEF(iemOp_BL_Ib)
12262{
12263 IEMOP_MNEMONIC("mov BL,Ib");
12264 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12265}
12266
12267
12268/** Opcode 0xb4. */
12269FNIEMOP_DEF(iemOp_mov_AH_Ib)
12270{
12271 IEMOP_MNEMONIC("mov AH,Ib");
12272 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12273}
12274
12275
12276/** Opcode 0xb5. */
12277FNIEMOP_DEF(iemOp_CH_Ib)
12278{
12279 IEMOP_MNEMONIC("mov CH,Ib");
12280 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12281}
12282
12283
12284/** Opcode 0xb6. */
12285FNIEMOP_DEF(iemOp_DH_Ib)
12286{
12287 IEMOP_MNEMONIC("mov DH,Ib");
12288 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12289}
12290
12291
12292/** Opcode 0xb7. */
12293FNIEMOP_DEF(iemOp_BH_Ib)
12294{
12295 IEMOP_MNEMONIC("mov BH,Ib");
12296 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12297}
12298
12299
12300/**
12301 * Common 'mov regX,immX' helper.
12302 */
12303FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
12304{
12305 switch (pVCpu->iem.s.enmEffOpSize)
12306 {
12307 case IEMMODE_16BIT:
12308 {
12309 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12311
12312 IEM_MC_BEGIN(0, 1);
12313 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
12314 IEM_MC_STORE_GREG_U16(iReg, u16Value);
12315 IEM_MC_ADVANCE_RIP();
12316 IEM_MC_END();
12317 break;
12318 }
12319
12320 case IEMMODE_32BIT:
12321 {
12322 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12324
12325 IEM_MC_BEGIN(0, 1);
12326 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
12327 IEM_MC_STORE_GREG_U32(iReg, u32Value);
12328 IEM_MC_ADVANCE_RIP();
12329 IEM_MC_END();
12330 break;
12331 }
12332 case IEMMODE_64BIT:
12333 {
12334 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
12335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12336
12337 IEM_MC_BEGIN(0, 1);
12338 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
12339 IEM_MC_STORE_GREG_U64(iReg, u64Value);
12340 IEM_MC_ADVANCE_RIP();
12341 IEM_MC_END();
12342 break;
12343 }
12344 }
12345
12346 return VINF_SUCCESS;
12347}
12348
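/*
 * Note that with REX.W the 0xb8..0xbf opcodes are the only instructions that
 * take a full 64-bit immediate ('movabs' in AT&T syntax), e.g. the encoding
 *
 *      48 b8 88 77 66 55 44 33 22 11    mov rax, 0x1122334455667788
 */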
12349
12350/** Opcode 0xb8. */
12351FNIEMOP_DEF(iemOp_eAX_Iv)
12352{
12353 IEMOP_MNEMONIC("mov rAX,Iv");
12354 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12355}
12356
12357
12358/** Opcode 0xb9. */
12359FNIEMOP_DEF(iemOp_eCX_Iv)
12360{
12361 IEMOP_MNEMONIC("mov rCX,Iv");
12362 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12363}
12364
12365
12366/** Opcode 0xba. */
12367FNIEMOP_DEF(iemOp_eDX_Iv)
12368{
12369 IEMOP_MNEMONIC("mov rDX,Iv");
12370 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12371}
12372
12373
12374/** Opcode 0xbb. */
12375FNIEMOP_DEF(iemOp_eBX_Iv)
12376{
12377 IEMOP_MNEMONIC("mov rBX,Iv");
12378 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12379}
12380
12381
12382/** Opcode 0xbc. */
12383FNIEMOP_DEF(iemOp_eSP_Iv)
12384{
12385 IEMOP_MNEMONIC("mov rSP,Iv");
12386 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12387}
12388
12389
12390/** Opcode 0xbd. */
12391FNIEMOP_DEF(iemOp_eBP_Iv)
12392{
12393 IEMOP_MNEMONIC("mov rBP,Iv");
12394 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12395}
12396
12397
12398/** Opcode 0xbe. */
12399FNIEMOP_DEF(iemOp_eSI_Iv)
12400{
12401 IEMOP_MNEMONIC("mov rSI,Iv");
12402 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12403}
12404
12405
12406/** Opcode 0xbf. */
12407FNIEMOP_DEF(iemOp_eDI_Iv)
12408{
12409 IEMOP_MNEMONIC("mov rDI,Iv");
12410 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12411}
12412
12413
12414/** Opcode 0xc0. */
12415FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
12416{
12417 IEMOP_HLP_MIN_186();
12418 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12419 PCIEMOPSHIFTSIZES pImpl;
12420 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
12421 {
12422 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,Ib"); break;
12423 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,Ib"); break;
12424 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,Ib"); break;
12425 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,Ib"); break;
12426 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,Ib"); break;
12427 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,Ib"); break;
12428 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,Ib"); break;
12429 case 6: return IEMOP_RAISE_INVALID_OPCODE();
12430 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
12431 }
12432 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
12433
12434 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12435 {
12436 /* register */
12437 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12439 IEM_MC_BEGIN(3, 0);
12440 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
12441 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
12442 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12443 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
12444 IEM_MC_REF_EFLAGS(pEFlags);
12445 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
12446 IEM_MC_ADVANCE_RIP();
12447 IEM_MC_END();
12448 }
12449 else
12450 {
12451 /* memory */
12452 IEM_MC_BEGIN(3, 2);
12453 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
12454 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12455 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12456 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12457
12458 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12459 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12460 IEM_MC_ASSIGN(cShiftArg, cShift);
12461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12462 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12463 IEM_MC_FETCH_EFLAGS(EFlags);
12464 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
12465
12466 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
12467 IEM_MC_COMMIT_EFLAGS(EFlags);
12468 IEM_MC_ADVANCE_RIP();
12469 IEM_MC_END();
12470 }
12471 return VINF_SUCCESS;
12472}
12473
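/*
 * In group 2 (and the other ModRM groups) the reg field is an opcode
 * extension rather than a register number, hence the dispatch above:
 *
 *      switch ((bRm >> 3) & 7)   // /0=rol /1=ror /2=rcl /3=rcr /4=shl /5=shr /7=sar
 *
 * /6 is reserved here; real CPUs reportedly alias it to shl, but the
 * emulation raises an invalid opcode exception for it.
 */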
12474
12475/** Opcode 0xc1. */
12476FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
12477{
12478 IEMOP_HLP_MIN_186();
12479 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12480 PCIEMOPSHIFTSIZES pImpl;
12481 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
12482 {
12483 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,Ib"); break;
12484 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,Ib"); break;
12485 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,Ib"); break;
12486 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,Ib"); break;
12487 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,Ib"); break;
12488 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,Ib"); break;
12489 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,Ib"); break;
12490 case 6: return IEMOP_RAISE_INVALID_OPCODE();
12491 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
12492 }
12493 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
12494
12495 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12496 {
12497 /* register */
12498 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12500 switch (pVCpu->iem.s.enmEffOpSize)
12501 {
12502 case IEMMODE_16BIT:
12503 IEM_MC_BEGIN(3, 0);
12504 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12505 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
12506 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12507 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
12508 IEM_MC_REF_EFLAGS(pEFlags);
12509 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
12510 IEM_MC_ADVANCE_RIP();
12511 IEM_MC_END();
12512 return VINF_SUCCESS;
12513
12514 case IEMMODE_32BIT:
12515 IEM_MC_BEGIN(3, 0);
12516 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12517 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
12518 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12519 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
12520 IEM_MC_REF_EFLAGS(pEFlags);
12521 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
12522 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12523 IEM_MC_ADVANCE_RIP();
12524 IEM_MC_END();
12525 return VINF_SUCCESS;
12526
12527 case IEMMODE_64BIT:
12528 IEM_MC_BEGIN(3, 0);
12529 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12530 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
12531 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12532 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
12533 IEM_MC_REF_EFLAGS(pEFlags);
12534 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
12535 IEM_MC_ADVANCE_RIP();
12536 IEM_MC_END();
12537 return VINF_SUCCESS;
12538
12539 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12540 }
12541 }
12542 else
12543 {
12544 /* memory */
12545 switch (pVCpu->iem.s.enmEffOpSize)
12546 {
12547 case IEMMODE_16BIT:
12548 IEM_MC_BEGIN(3, 2);
12549 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12550 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12551 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12552 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12553
12554 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12555 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12556 IEM_MC_ASSIGN(cShiftArg, cShift);
12557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12558 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12559 IEM_MC_FETCH_EFLAGS(EFlags);
12560 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
12561
12562 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
12563 IEM_MC_COMMIT_EFLAGS(EFlags);
12564 IEM_MC_ADVANCE_RIP();
12565 IEM_MC_END();
12566 return VINF_SUCCESS;
12567
12568 case IEMMODE_32BIT:
12569 IEM_MC_BEGIN(3, 2);
12570 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12571 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12572 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12573 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12574
12575 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12576 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12577 IEM_MC_ASSIGN(cShiftArg, cShift);
12578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12579 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12580 IEM_MC_FETCH_EFLAGS(EFlags);
12581 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
12582
12583 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
12584 IEM_MC_COMMIT_EFLAGS(EFlags);
12585 IEM_MC_ADVANCE_RIP();
12586 IEM_MC_END();
12587 return VINF_SUCCESS;
12588
12589 case IEMMODE_64BIT:
12590 IEM_MC_BEGIN(3, 2);
12591 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12592 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12593 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12594 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12595
12596 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12597 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12598 IEM_MC_ASSIGN(cShiftArg, cShift);
12599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12600 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12601 IEM_MC_FETCH_EFLAGS(EFlags);
12602 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
12603
12604 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
12605 IEM_MC_COMMIT_EFLAGS(EFlags);
12606 IEM_MC_ADVANCE_RIP();
12607 IEM_MC_END();
12608 return VINF_SUCCESS;
12609
12610 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12611 }
12612 }
12613}
12614
12615
12616/** Opcode 0xc2. */
12617FNIEMOP_DEF(iemOp_retn_Iw)
12618{
12619 IEMOP_MNEMONIC("retn Iw");
12620 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12622 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12623 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
12624}
12625
12626
12627/** Opcode 0xc3. */
12628FNIEMOP_DEF(iemOp_retn)
12629{
12630 IEMOP_MNEMONIC("retn");
12631 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12633 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
12634}
12635
12636
12637/** Opcode 0xc4. */
12638FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
12639{
12640 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12641 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
12642 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12643 {
12644 IEMOP_MNEMONIC("3-byte-vex");
12645 /* The LES instruction is invalid in 64-bit mode. In legacy and
12646 compatibility mode it is invalid with MOD=3.
12647 The use as the three byte VEX prefix is made possible by assigning
12648 the inverted REX.R and REX.X to the two MOD bits, since the REX
12649 bits are ignored outside of 64-bit mode (0xc4 is the three byte
12650 form, 0xc5 the two byte one). */
12651 /** @todo VEX: Just use new tables for it. */
12652 return IEMOP_RAISE_INVALID_OPCODE();
12653 }
12654 IEMOP_MNEMONIC("les Gv,Mp");
12655 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
12656}
12657
12658
12659/** Opcode 0xc5. */
12660FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
12661{
12662 /* The LDS instruction is invalid in 64-bit mode. In legacy and
12663 compatibility mode it is invalid with MOD=3.
12664 The use as the two byte VEX prefix is made possible by assigning the
12665 inverted REX.R to the top MOD bit and the top bit of the inverted register
12666 specifier to the bottom MOD bit. VEX is not available in real or v86 mode. */
12667 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12668 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
12669 {
12670 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
12671 {
12672 IEMOP_MNEMONIC("lds Gv,Mp");
12673 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
12674 }
12675 IEMOP_HLP_NO_REAL_OR_V86_MODE();
12676 }
12677
12678 IEMOP_MNEMONIC("2-byte-vex");
12679 /** @todo Test when exactly the VEX conformance checks kick in during
12680 * instruction decoding and fetching (using \#PF). */
12681 uint8_t bVex; IEM_OPCODE_GET_NEXT_U8(&bVex); /* the single payload byte of the two byte form */
12682 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
12684#if 0 /* will make sense of this next week... */
12685 if ( !(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
12686 &&
12687 )
12688 {
12689
12690 }
12691#endif
12692
12693 /** @todo VEX: Just use new tables for it. */
12694 return IEMOP_RAISE_INVALID_OPCODE();
12695}
12696
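/*
 * For reference, the VEX prefix layouts overlapping LES/LDS (Intel SDM
 * notation; only the top two bits of the byte following the prefix, i.e.
 * the would-be MOD field, matter for the legacy escape checks above):
 *
 *      3-byte form: 0xc4  [~R ~X ~B m-mmmm]  [W ~vvvv L pp]  opcode ...
 *      2-byte form: 0xc5  [~R ~vvvv L pp]                    opcode ...
 *
 * where ~R/~X/~B are the inverted REX bits and ~vvvv the inverted extra
 * register specifier.
 */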
12697
12698/** Opcode 0xc6. */
12699FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
12700{
12701 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12702 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
12703 return IEMOP_RAISE_INVALID_OPCODE();
12704 IEMOP_MNEMONIC("mov Eb,Ib");
12705
12706 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12707 {
12708 /* register access */
12709 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12711 IEM_MC_BEGIN(0, 0);
12712 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
12713 IEM_MC_ADVANCE_RIP();
12714 IEM_MC_END();
12715 }
12716 else
12717 {
12718 /* memory access. */
12719 IEM_MC_BEGIN(0, 1);
12720 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12721 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12722 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12724 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
12725 IEM_MC_ADVANCE_RIP();
12726 IEM_MC_END();
12727 }
12728 return VINF_SUCCESS;
12729}
12730
12731
12732/** Opcode 0xc7. */
12733FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
12734{
12735 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12736 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
12737 return IEMOP_RAISE_INVALID_OPCODE();
12738 IEMOP_MNEMONIC("mov Ev,Iz");
12739
12740 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12741 {
12742 /* register access */
12743 switch (pVCpu->iem.s.enmEffOpSize)
12744 {
12745 case IEMMODE_16BIT:
12746 IEM_MC_BEGIN(0, 0);
12747 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12749 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
12750 IEM_MC_ADVANCE_RIP();
12751 IEM_MC_END();
12752 return VINF_SUCCESS;
12753
12754 case IEMMODE_32BIT:
12755 IEM_MC_BEGIN(0, 0);
12756 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12758 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
12759 IEM_MC_ADVANCE_RIP();
12760 IEM_MC_END();
12761 return VINF_SUCCESS;
12762
12763 case IEMMODE_64BIT:
12764 IEM_MC_BEGIN(0, 0);
12765 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
12766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12767 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
12768 IEM_MC_ADVANCE_RIP();
12769 IEM_MC_END();
12770 return VINF_SUCCESS;
12771
12772 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12773 }
12774 }
12775 else
12776 {
12777 /* memory access. */
12778 switch (pVCpu->iem.s.enmEffOpSize)
12779 {
12780 case IEMMODE_16BIT:
12781 IEM_MC_BEGIN(0, 1);
12782 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12783 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
12784 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12786 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
12787 IEM_MC_ADVANCE_RIP();
12788 IEM_MC_END();
12789 return VINF_SUCCESS;
12790
12791 case IEMMODE_32BIT:
12792 IEM_MC_BEGIN(0, 1);
12793 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12794 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
12795 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12797 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
12798 IEM_MC_ADVANCE_RIP();
12799 IEM_MC_END();
12800 return VINF_SUCCESS;
12801
12802 case IEMMODE_64BIT:
12803 IEM_MC_BEGIN(0, 1);
12804 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12805 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
12806 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
12807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12808 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
12809 IEM_MC_ADVANCE_RIP();
12810 IEM_MC_END();
12811 return VINF_SUCCESS;
12812
12813 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12814 }
12815 }
12816}
12817
12818
12819
12820
12821/** Opcode 0xc8. */
12822FNIEMOP_DEF(iemOp_enter_Iw_Ib)
12823{
12824 IEMOP_MNEMONIC("enter Iw,Ib");
12825 IEMOP_HLP_MIN_186();
12826 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12827 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
12828 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
12829 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12830 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
12831}
12832
12833
12834/** Opcode 0xc9. */
12835FNIEMOP_DEF(iemOp_leave)
12836{
12837 IEMOP_MNEMONIC("retn");
12838 IEMOP_HLP_MIN_186();
12839 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12841 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
12842}
12843
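/*
 * For the common nesting level 0 case, the ENTER/LEAVE pair deferred to the
 * CIMPL workers above is, in effect (sketch):
 *
 *      ENTER cb,0:  push(rBP);  rBP = rSP;  rSP -= cb;   // set up the frame
 *      LEAVE:       rSP = rBP;  rBP = pop();             // tear it down
 */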
12844
12845/** Opcode 0xca. */
12846FNIEMOP_DEF(iemOp_retf_Iw)
12847{
12848 IEMOP_MNEMONIC("retf Iw");
12849 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12851 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12852 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
12853}
12854
12855
12856/** Opcode 0xcb. */
12857FNIEMOP_DEF(iemOp_retf)
12858{
12859 IEMOP_MNEMONIC("retf");
12860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12861 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12862 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
12863}
12864
12865
12866/** Opcode 0xcc. */
12867FNIEMOP_DEF(iemOp_int_3)
12868{
12869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12870 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
12871}
12872
12873
12874/** Opcode 0xcd. */
12875FNIEMOP_DEF(iemOp_int_Ib)
12876{
12877 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
12878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12879 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
12880}
12881
12882
12883/** Opcode 0xce. */
12884FNIEMOP_DEF(iemOp_into)
12885{
12886 IEMOP_MNEMONIC("into");
12887 IEMOP_HLP_NO_64BIT();
12888
12889 IEM_MC_BEGIN(2, 0);
12890 IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
12891 IEM_MC_ARG_CONST(bool, fIsBpInstr, /*=*/ false, 1);
12892 IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
12893 IEM_MC_END();
12894 return VINF_SUCCESS;
12895}
12896
12897
12898/** Opcode 0xcf. */
12899FNIEMOP_DEF(iemOp_iret)
12900{
12901 IEMOP_MNEMONIC("iret");
12902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12903 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
12904}
12905
12906
12907/** Opcode 0xd0. */
12908FNIEMOP_DEF(iemOp_Grp2_Eb_1)
12909{
12910 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12911 PCIEMOPSHIFTSIZES pImpl;
12912 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
12913 {
12914 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,1"); break;
12915 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,1"); break;
12916 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,1"); break;
12917 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,1"); break;
12918 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,1"); break;
12919 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,1"); break;
12920 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,1"); break;
12921 case 6: return IEMOP_RAISE_INVALID_OPCODE();
12922 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
12923 }
12924 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
12925
12926 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12927 {
12928 /* register */
12929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12930 IEM_MC_BEGIN(3, 0);
12931 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
12932 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
12933 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12934 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
12935 IEM_MC_REF_EFLAGS(pEFlags);
12936 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
12937 IEM_MC_ADVANCE_RIP();
12938 IEM_MC_END();
12939 }
12940 else
12941 {
12942 /* memory */
12943 IEM_MC_BEGIN(3, 2);
12944 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
12945 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
12946 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12947 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12948
12949 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12951 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12952 IEM_MC_FETCH_EFLAGS(EFlags);
12953 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
12954
12955 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
12956 IEM_MC_COMMIT_EFLAGS(EFlags);
12957 IEM_MC_ADVANCE_RIP();
12958 IEM_MC_END();
12959 }
12960 return VINF_SUCCESS;
12961}
12962
12963
12964
12965/** Opcode 0xd1. */
12966FNIEMOP_DEF(iemOp_Grp2_Ev_1)
12967{
12968 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12969 PCIEMOPSHIFTSIZES pImpl;
12970 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
12971 {
12972 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,1"); break;
12973 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,1"); break;
12974 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,1"); break;
12975 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,1"); break;
12976 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,1"); break;
12977 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,1"); break;
12978 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,1"); break;
12979 case 6: return IEMOP_RAISE_INVALID_OPCODE();
12980 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
12981 }
12982 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
12983
12984 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12985 {
12986 /* register */
12987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12988 switch (pVCpu->iem.s.enmEffOpSize)
12989 {
12990 case IEMMODE_16BIT:
12991 IEM_MC_BEGIN(3, 0);
12992 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12993 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
12994 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12995 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
12996 IEM_MC_REF_EFLAGS(pEFlags);
12997 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
12998 IEM_MC_ADVANCE_RIP();
12999 IEM_MC_END();
13000 return VINF_SUCCESS;
13001
13002 case IEMMODE_32BIT:
13003 IEM_MC_BEGIN(3, 0);
13004 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13005 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13006 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13007 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13008 IEM_MC_REF_EFLAGS(pEFlags);
13009 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13010 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
13011 IEM_MC_ADVANCE_RIP();
13012 IEM_MC_END();
13013 return VINF_SUCCESS;
13014
13015 case IEMMODE_64BIT:
13016 IEM_MC_BEGIN(3, 0);
13017 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13018 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13019 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13020 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13021 IEM_MC_REF_EFLAGS(pEFlags);
13022 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13023 IEM_MC_ADVANCE_RIP();
13024 IEM_MC_END();
13025 return VINF_SUCCESS;
13026
13027 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13028 }
13029 }
13030 else
13031 {
13032 /* memory */
13033 switch (pVCpu->iem.s.enmEffOpSize)
13034 {
13035 case IEMMODE_16BIT:
13036 IEM_MC_BEGIN(3, 2);
13037 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13038 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13039 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13040 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13041
13042 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13044 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13045 IEM_MC_FETCH_EFLAGS(EFlags);
13046 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13047
13048 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
13049 IEM_MC_COMMIT_EFLAGS(EFlags);
13050 IEM_MC_ADVANCE_RIP();
13051 IEM_MC_END();
13052 return VINF_SUCCESS;
13053
13054 case IEMMODE_32BIT:
13055 IEM_MC_BEGIN(3, 2);
13056 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13057 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13058 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13059 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13060
13061 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13063 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13064 IEM_MC_FETCH_EFLAGS(EFlags);
13065 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13066
13067 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
13068 IEM_MC_COMMIT_EFLAGS(EFlags);
13069 IEM_MC_ADVANCE_RIP();
13070 IEM_MC_END();
13071 return VINF_SUCCESS;
13072
13073 case IEMMODE_64BIT:
13074 IEM_MC_BEGIN(3, 2);
13075 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13076 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13077 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13079
13080 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13082 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13083 IEM_MC_FETCH_EFLAGS(EFlags);
13084 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13085
13086 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
13087 IEM_MC_COMMIT_EFLAGS(EFlags);
13088 IEM_MC_ADVANCE_RIP();
13089 IEM_MC_END();
13090 return VINF_SUCCESS;
13091
13092 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13093 }
13094 }
13095}
13096
13097
13098/** Opcode 0xd2. */
13099FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
13100{
13101 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13102 PCIEMOPSHIFTSIZES pImpl;
13103 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13104 {
13105 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,CL"); break;
13106 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,CL"); break;
13107 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,CL"); break;
13108 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,CL"); break;
13109 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,CL"); break;
13110 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,CL"); break;
13111 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,CL"); break;
13112 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13113 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
13114 }
13115 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13116
13117 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13118 {
13119 /* register */
13120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13121 IEM_MC_BEGIN(3, 0);
13122 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13123 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13124 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13125 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13126 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13127 IEM_MC_REF_EFLAGS(pEFlags);
13128 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13129 IEM_MC_ADVANCE_RIP();
13130 IEM_MC_END();
13131 }
13132 else
13133 {
13134 /* memory */
13135 IEM_MC_BEGIN(3, 2);
13136 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13137 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13138 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13139 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13140
13141 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13143 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13144 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13145 IEM_MC_FETCH_EFLAGS(EFlags);
13146 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13147
13148 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
13149 IEM_MC_COMMIT_EFLAGS(EFlags);
13150 IEM_MC_ADVANCE_RIP();
13151 IEM_MC_END();
13152 }
13153 return VINF_SUCCESS;
13154}
13155
13156
13157/** Opcode 0xd3. */
13158FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
13159{
13160 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13161 PCIEMOPSHIFTSIZES pImpl;
13162 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13163 {
13164 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,CL"); break;
13165 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,CL"); break;
13166 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,CL"); break;
13167 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,CL"); break;
13168 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,CL"); break;
13169 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,CL"); break;
13170 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,CL"); break;
13171 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13172 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
13173 }
13174 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13175
13176 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13177 {
13178 /* register */
13179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13180 switch (pVCpu->iem.s.enmEffOpSize)
13181 {
13182 case IEMMODE_16BIT:
13183 IEM_MC_BEGIN(3, 0);
13184 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13185 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13186 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13187 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13188 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13189 IEM_MC_REF_EFLAGS(pEFlags);
13190 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13191 IEM_MC_ADVANCE_RIP();
13192 IEM_MC_END();
13193 return VINF_SUCCESS;
13194
13195 case IEMMODE_32BIT:
13196 IEM_MC_BEGIN(3, 0);
13197 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13198 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13199 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13200 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13201 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13202 IEM_MC_REF_EFLAGS(pEFlags);
13203 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13204 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
13205 IEM_MC_ADVANCE_RIP();
13206 IEM_MC_END();
13207 return VINF_SUCCESS;
13208
13209 case IEMMODE_64BIT:
13210 IEM_MC_BEGIN(3, 0);
13211 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13212 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13213 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13214 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13215 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13216 IEM_MC_REF_EFLAGS(pEFlags);
13217 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13218 IEM_MC_ADVANCE_RIP();
13219 IEM_MC_END();
13220 return VINF_SUCCESS;
13221
13222 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13223 }
13224 }
13225 else
13226 {
13227 /* memory */
13228 switch (pVCpu->iem.s.enmEffOpSize)
13229 {
13230 case IEMMODE_16BIT:
13231 IEM_MC_BEGIN(3, 2);
13232 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13233 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13234 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13235 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13236
13237 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13239 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13240 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13241 IEM_MC_FETCH_EFLAGS(EFlags);
13242 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13243
13244 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
13245 IEM_MC_COMMIT_EFLAGS(EFlags);
13246 IEM_MC_ADVANCE_RIP();
13247 IEM_MC_END();
13248 return VINF_SUCCESS;
13249
13250 case IEMMODE_32BIT:
13251 IEM_MC_BEGIN(3, 2);
13252 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13253 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13254 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13255 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13256
13257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13259 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13260 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13261 IEM_MC_FETCH_EFLAGS(EFlags);
13262 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13263
13264 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
13265 IEM_MC_COMMIT_EFLAGS(EFlags);
13266 IEM_MC_ADVANCE_RIP();
13267 IEM_MC_END();
13268 return VINF_SUCCESS;
13269
13270 case IEMMODE_64BIT:
13271 IEM_MC_BEGIN(3, 2);
13272 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13273 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13274 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13275 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13276
13277 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13278 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13279 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13280 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13281 IEM_MC_FETCH_EFLAGS(EFlags);
13282 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13283
13284 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
13285 IEM_MC_COMMIT_EFLAGS(EFlags);
13286 IEM_MC_ADVANCE_RIP();
13287 IEM_MC_END();
13288 return VINF_SUCCESS;
13289
13290 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13291 }
13292 }
13293}
13294
13295/** Opcode 0xd4. */
13296FNIEMOP_DEF(iemOp_aam_Ib)
13297{
13298 IEMOP_MNEMONIC("aam Ib");
13299 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
13300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13301 IEMOP_HLP_NO_64BIT();
13302 if (!bImm)
13303 return IEMOP_RAISE_DIVIDE_ERROR();
13304 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
13305}
13306
13307
13308/** Opcode 0xd5. */
13309FNIEMOP_DEF(iemOp_aad_Ib)
13310{
13311 IEMOP_MNEMONIC("aad Ib");
13312 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
13313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13314 IEMOP_HLP_NO_64BIT();
13315 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
13316}
13317
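/*
 * For reference, the ASCII adjust semantics deferred to the CIMPL workers
 * above (the immediate is 10 (0x0a) in the classic forms):
 *
 *      AAM: AH = AL / bImm;  AL = AL % bImm;            // #DE when bImm == 0
 *      AAD: AL = (AL + AH * bImm) & 0xff;  AH = 0;
 */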
13318
13319/** Opcode 0xd6. */
13320FNIEMOP_DEF(iemOp_salc)
13321{
13322 IEMOP_MNEMONIC("salc");
13323 IEMOP_HLP_MIN_286(); /* (undocumented at the time) */
13324 /* Note: no ModRM byte or immediate; 0xd6 is a bare one-byte opcode. */
13325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13326 IEMOP_HLP_NO_64BIT();
13327
13328 IEM_MC_BEGIN(0, 0);
13329 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
13330 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
13331 } IEM_MC_ELSE() {
13332 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
13333 } IEM_MC_ENDIF();
13334 IEM_MC_ADVANCE_RIP();
13335 IEM_MC_END();
13336 return VINF_SUCCESS;
13337}
13338
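/*
 * SALC is thus equivalent to 'sbb al, al' except that it leaves EFLAGS
 * untouched:
 *
 *      AL = (EFLAGS & X86_EFL_CF) ? 0xff : 0x00;
 */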
13339
13340/** Opcode 0xd7. */
13341FNIEMOP_DEF(iemOp_xlat)
13342{
13343 IEMOP_MNEMONIC("xlat");
13344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13345 switch (pVCpu->iem.s.enmEffAddrMode)
13346 {
13347 case IEMMODE_16BIT:
13348 IEM_MC_BEGIN(2, 0);
13349 IEM_MC_LOCAL(uint8_t, u8Tmp);
13350 IEM_MC_LOCAL(uint16_t, u16Addr);
13351 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
13352 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
13353 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
13354 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
13355 IEM_MC_ADVANCE_RIP();
13356 IEM_MC_END();
13357 return VINF_SUCCESS;
13358
13359 case IEMMODE_32BIT:
13360 IEM_MC_BEGIN(2, 0);
13361 IEM_MC_LOCAL(uint8_t, u8Tmp);
13362 IEM_MC_LOCAL(uint32_t, u32Addr);
13363 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
13364 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
13365 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
13366 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
13367 IEM_MC_ADVANCE_RIP();
13368 IEM_MC_END();
13369 return VINF_SUCCESS;
13370
13371 case IEMMODE_64BIT:
13372 IEM_MC_BEGIN(2, 0);
13373 IEM_MC_LOCAL(uint8_t, u8Tmp);
13374 IEM_MC_LOCAL(uint64_t, u64Addr);
13375 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
13376 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
13377 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
13378 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
13379 IEM_MC_ADVANCE_RIP();
13380 IEM_MC_END();
13381 return VINF_SUCCESS;
13382
13383 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13384 }
13385}
13386
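/*
 * XLAT is a byte table lookup relative to rBX; in effect (segBase and
 * addrMask being hypothetical shorthands for the effective segment base and
 * the address-size truncation handled by the switch above):
 *
 *      AL = *(uint8_t const *)(segBase + ((rBX + AL) & addrMask));
 */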
13387
13388/**
13389 * Common worker for FPU instructions working on ST0 and STn, and storing the
13390 * result in ST0.
13391 *
13392 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13393 */
13394FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
13395{
13396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13397
13398 IEM_MC_BEGIN(3, 1);
13399 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13400 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13401 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13402 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13403
13404 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13405 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13406 IEM_MC_PREPARE_FPU_USAGE();
13407 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13408 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
13409 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13410 IEM_MC_ELSE()
13411 IEM_MC_FPU_STACK_UNDERFLOW(0);
13412 IEM_MC_ENDIF();
13413 IEM_MC_ADVANCE_RIP();
13414
13415 IEM_MC_END();
13416 return VINF_SUCCESS;
13417}
13418
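/*
 * ST0/STn above are logical stack registers; the FPU maps them onto the
 * eight physical registers through the TOP field of the status word,
 * roughly:
 *
 *      iPhysReg = (X86_FSW_TOP_GET(pFpuCtx->FSW) + iStReg) & 7;
 */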
13419
13420/**
13421 * Common worker for FPU instructions working on ST0 and STn, and only affecting
13422 * flags.
13423 *
13424 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13425 */
13426FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
13427{
13428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13429
13430 IEM_MC_BEGIN(3, 1);
13431 IEM_MC_LOCAL(uint16_t, u16Fsw);
13432 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13433 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13434 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13435
13436 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13437 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13438 IEM_MC_PREPARE_FPU_USAGE();
13439 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13440 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
13441 IEM_MC_UPDATE_FSW(u16Fsw);
13442 IEM_MC_ELSE()
13443 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
13444 IEM_MC_ENDIF();
13445 IEM_MC_ADVANCE_RIP();
13446
13447 IEM_MC_END();
13448 return VINF_SUCCESS;
13449}
13450
13451
13452/**
13453 * Common worker for FPU instructions working on ST0 and STn, only affecting
13454 * flags, and popping when done.
13455 *
13456 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13457 */
13458FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
13459{
13460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13461
13462 IEM_MC_BEGIN(3, 1);
13463 IEM_MC_LOCAL(uint16_t, u16Fsw);
13464 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13465 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13466 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13467
13468 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13469 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13470 IEM_MC_PREPARE_FPU_USAGE();
13471 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13472 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
13473 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
13474 IEM_MC_ELSE()
13475 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
13476 IEM_MC_ENDIF();
13477 IEM_MC_ADVANCE_RIP();
13478
13479 IEM_MC_END();
13480 return VINF_SUCCESS;
13481}
13482
13483
13484/** Opcode 0xd8 11/0. */
13485FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
13486{
13487 IEMOP_MNEMONIC("fadd st0,stN");
13488 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
13489}
13490
13491
13492/** Opcode 0xd8 11/1. */
13493FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
13494{
13495 IEMOP_MNEMONIC("fmul st0,stN");
13496 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
13497}
13498
13499
13500/** Opcode 0xd8 11/2. */
13501FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
13502{
13503 IEMOP_MNEMONIC("fcom st0,stN");
13504 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
13505}
13506
13507
13508/** Opcode 0xd8 11/3. */
13509FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
13510{
13511 IEMOP_MNEMONIC("fcomp st0,stN");
13512 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
13513}
13514
13515
13516/** Opcode 0xd8 11/4. */
13517FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
13518{
13519 IEMOP_MNEMONIC("fsub st0,stN");
13520 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
13521}
13522
13523
13524/** Opcode 0xd8 11/5. */
13525FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
13526{
13527 IEMOP_MNEMONIC("fsubr st0,stN");
13528 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
13529}
13530
13531
13532/** Opcode 0xd8 11/6. */
13533FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
13534{
13535 IEMOP_MNEMONIC("fdiv st0,stN");
13536 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
13537}
13538
13539
13540/** Opcode 0xd8 11/7. */
13541FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
13542{
13543 IEMOP_MNEMONIC("fdivr st0,stN");
13544 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
13545}
13546
13547
13548/**
13549 * Common worker for FPU instructions working on ST0 and an m32r, and storing
13550 * the result in ST0.
13551 *
13552 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13553 */
13554FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
13555{
13556 IEM_MC_BEGIN(3, 3);
13557 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13558 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13559 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
13560 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13561 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13562 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
13563
13564 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13566
13567 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13568 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13569 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13570
13571 IEM_MC_PREPARE_FPU_USAGE();
13572 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
13573 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
13574 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13575 IEM_MC_ELSE()
13576 IEM_MC_FPU_STACK_UNDERFLOW(0);
13577 IEM_MC_ENDIF();
13578 IEM_MC_ADVANCE_RIP();
13579
13580 IEM_MC_END();
13581 return VINF_SUCCESS;
13582}
13583
13584
13585/** Opcode 0xd8 !11/0. */
13586FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
13587{
13588 IEMOP_MNEMONIC("fadd st0,m32r");
13589 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
13590}
13591
13592
13593/** Opcode 0xd8 !11/1. */
13594FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
13595{
13596 IEMOP_MNEMONIC("fmul st0,m32r");
13597 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
13598}
13599
13600
13601/** Opcode 0xd8 !11/2. */
13602FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
13603{
13604 IEMOP_MNEMONIC("fcom st0,m32r");
13605
13606 IEM_MC_BEGIN(3, 3);
13607 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13608 IEM_MC_LOCAL(uint16_t, u16Fsw);
13609 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
13610 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13611 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13612 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
13613
13614 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13616
13617 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13618 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13619 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13620
13621 IEM_MC_PREPARE_FPU_USAGE();
13622 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
13623 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
13624 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13625 IEM_MC_ELSE()
13626 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13627 IEM_MC_ENDIF();
13628 IEM_MC_ADVANCE_RIP();
13629
13630 IEM_MC_END();
13631 return VINF_SUCCESS;
13632}
13633
13634
13635/** Opcode 0xd8 !11/3. */
13636FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
13637{
13638 IEMOP_MNEMONIC("fcomp st0,m32r");
13639
13640 IEM_MC_BEGIN(3, 3);
13641 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13642 IEM_MC_LOCAL(uint16_t, u16Fsw);
13643 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
13644 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13645 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13646 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
13647
13648 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13650
13651 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13652 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13653 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13654
13655 IEM_MC_PREPARE_FPU_USAGE();
13656 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
13657 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
13658 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13659 IEM_MC_ELSE()
13660 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13661 IEM_MC_ENDIF();
13662 IEM_MC_ADVANCE_RIP();
13663
13664 IEM_MC_END();
13665 return VINF_SUCCESS;
13666}
13667
13668
13669/** Opcode 0xd8 !11/4. */
13670FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
13671{
13672 IEMOP_MNEMONIC("fsub st0,m32r");
13673 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
13674}
13675
13676
13677/** Opcode 0xd8 !11/5. */
13678FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
13679{
13680 IEMOP_MNEMONIC("fsubr st0,m32r");
13681 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
13682}
13683
13684
13685/** Opcode 0xd8 !11/6. */
13686FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
13687{
13688 IEMOP_MNEMONIC("fdiv st0,m32r");
13689 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
13690}
13691
13692
13693/** Opcode 0xd8 !11/7. */
13694FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
13695{
13696 IEMOP_MNEMONIC("fdivr st0,m32r");
13697 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
13698}
13699
13700
13701/** Opcode 0xd8. */
13702FNIEMOP_DEF(iemOp_EscF0)
13703{
13704 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13705 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
13706
13707 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13708 {
13709 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13710 {
13711 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
13712 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
13713 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
13714 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
13715 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
13716 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
13717 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
13718 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
13719 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13720 }
13721 }
13722 else
13723 {
13724 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13725 {
13726 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
13727 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
13728 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
13729 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
13730 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
13731 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
13732 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
13733 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
13734 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13735 }
13736 }
13737}
13738
13739
13740/** Opcode 0xd9 /0 mem32real
13741 * @sa iemOp_fld_m64r */
13742FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
13743{
13744 IEMOP_MNEMONIC("fld m32r");
13745
13746 IEM_MC_BEGIN(2, 3);
13747 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13748 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13749 IEM_MC_LOCAL(RTFLOAT32U, r32Val);
13750 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13751 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);
13752
13753 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13755
13756 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13757 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13758 IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13759
13760 IEM_MC_PREPARE_FPU_USAGE();
13761 IEM_MC_IF_FPUREG_IS_EMPTY(7)
13762 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
13763 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13764 IEM_MC_ELSE()
13765 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13766 IEM_MC_ENDIF();
13767 IEM_MC_ADVANCE_RIP();
13768
13769 IEM_MC_END();
13770 return VINF_SUCCESS;
13771}
13772
13773
13774/** Opcode 0xd9 !11/2 mem32real */
13775FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
13776{
13777 IEMOP_MNEMONIC("fst m32r");
13778 IEM_MC_BEGIN(3, 2);
13779 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13780 IEM_MC_LOCAL(uint16_t, u16Fsw);
13781 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13782 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
13783 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
13784
13785 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13787 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13788 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13789
13790 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
13791 IEM_MC_PREPARE_FPU_USAGE();
13792 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13793 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
13794 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
13795 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13796 IEM_MC_ELSE()
13797 IEM_MC_IF_FCW_IM()
13798 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
13799 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
13800 IEM_MC_ENDIF();
13801 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13802 IEM_MC_ENDIF();
13803 IEM_MC_ADVANCE_RIP();
13804
13805 IEM_MC_END();
13806 return VINF_SUCCESS;
13807}
13808
13809
13810/** Opcode 0xd9 !11/3 */
13811FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
13812{
13813 IEMOP_MNEMONIC("fstp m32r");
13814 IEM_MC_BEGIN(3, 2);
13815 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13816 IEM_MC_LOCAL(uint16_t, u16Fsw);
13817 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13818 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
13819 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
13820
13821 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13823 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13824 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13825
13826 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
13827 IEM_MC_PREPARE_FPU_USAGE();
13828 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13829 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
13830 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
13831 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13832 IEM_MC_ELSE()
13833 IEM_MC_IF_FCW_IM()
13834 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
13835 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
13836 IEM_MC_ENDIF();
13837 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13838 IEM_MC_ENDIF();
13839 IEM_MC_ADVANCE_RIP();
13840
13841 IEM_MC_END();
13842 return VINF_SUCCESS;
13843}
13844
13845
13846/** Opcode 0xd9 !11/4 */
13847FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
13848{
13849 IEMOP_MNEMONIC("fldenv m14/28byte");
13850 IEM_MC_BEGIN(3, 0);
13851 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
13852 IEM_MC_ARG(uint8_t, iEffSeg, 1);
13853 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
13854 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13856 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13857 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13858 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
13859 IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
13860 IEM_MC_END();
13861 return VINF_SUCCESS;
13862}
13863
13864
13865/** Opcode 0xd9 !11/5 */
13866FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
13867{
13868 IEMOP_MNEMONIC("fldcw m2byte");
13869 IEM_MC_BEGIN(1, 1);
13870 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13871 IEM_MC_ARG(uint16_t, u16Fcw, 0);
13872 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13874 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13875 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13876 IEM_MC_FETCH_MEM_U16(u16Fcw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13877 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fcw);
13878 IEM_MC_END();
13879 return VINF_SUCCESS;
13880}
13881
13882
13883/** Opcode 0xd9 !11/6 */
13884FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
13885{
13886 IEMOP_MNEMONIC("fstenv m14/m28byte");
13887 IEM_MC_BEGIN(3, 0);
13888 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
13889 IEM_MC_ARG(uint8_t, iEffSeg, 1);
13890 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
13891 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13893 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13894 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
13895 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
13896 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
13897 IEM_MC_END();
13898 return VINF_SUCCESS;
13899}
13900
13901
13902/** Opcode 0xd9 !11/7 */
13903FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
13904{
13905 IEMOP_MNEMONIC("fnstcw m2byte");
13906 IEM_MC_BEGIN(2, 0);
13907 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13908 IEM_MC_LOCAL(uint16_t, u16Fcw);
13909 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13911 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13912 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
13913 IEM_MC_FETCH_FCW(u16Fcw);
13914 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
13915 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
13916 IEM_MC_END();
13917 return VINF_SUCCESS;
13918}
13919
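/*
 * For reference, the control word stored/loaded by fnstcw/fldcw above
 * (IEM_MC_IF_FCW_IM elsewhere in this file tests the IM bit):
 *
 *      bits 0..5   - exception masks IM DM ZM OM UM PM
 *      bits 8..9   - precision control (PC)
 *      bits 10..11 - rounding control (RC)
 */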
13920
13921/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?. */
13922FNIEMOP_DEF(iemOp_fnop)
13923{
13924 IEMOP_MNEMONIC("fnop");
13925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13926
13927 IEM_MC_BEGIN(0, 0);
13928 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13929 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13930 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13931 /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
13932 * an Intel optimization. Investigate. */
13933 IEM_MC_UPDATE_FPU_OPCODE_IP();
13934 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
13935 IEM_MC_END();
13936 return VINF_SUCCESS;
13937}
13938
13939
13940/** Opcode 0xd9 11/0 stN */
13941FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
13942{
13943 IEMOP_MNEMONIC("fld stN");
13944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13945
13946 /** @todo Testcase: Check whether this raises \#MF. Intel does not mention
13947 * it; AMD indicates that it does. */
13948 IEM_MC_BEGIN(0, 2);
13949 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
13950 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13951 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13952 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13953
13954 IEM_MC_PREPARE_FPU_USAGE();
13955 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
13956 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
13957 IEM_MC_PUSH_FPU_RESULT(FpuRes);
13958 IEM_MC_ELSE()
13959 IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
13960 IEM_MC_ENDIF();
13961
13962 IEM_MC_ADVANCE_RIP();
13963 IEM_MC_END();
13964
13965 return VINF_SUCCESS;
13966}
13967
13968
13969 /** Opcode 0xd9 11/1 stN */
13970FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
13971{
13972 IEMOP_MNEMONIC("fxch stN");
13973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13974
13975 /** @todo Testcase: Check whether this raises \#MF. Intel does not mention
13976 * it; AMD indicates that it does. */
13977 IEM_MC_BEGIN(1, 3);
13978 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
13979 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
13980 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13981 IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
13982 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13983 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13984
13985 IEM_MC_PREPARE_FPU_USAGE();
13986 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13987 IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
13988 IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
13989 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13990 IEM_MC_ELSE()
13991 IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
13992 IEM_MC_ENDIF();
13993
13994 IEM_MC_ADVANCE_RIP();
13995 IEM_MC_END();
13996
13997 return VINF_SUCCESS;
13998}
13999
14000
14001 /** Opcode 0xd9 11/3, 0xdd 11/3. */
14002FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
14003{
14004 IEMOP_MNEMONIC("fstp st0,stN");
14005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14006
14007 /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
14008 uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
14009 if (!iDstReg)
14010 {
14011 IEM_MC_BEGIN(0, 1);
14012 IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
14013 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14014 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14015
14016 IEM_MC_PREPARE_FPU_USAGE();
14017 IEM_MC_IF_FPUREG_NOT_EMPTY(0)
14018 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
14019 IEM_MC_ELSE()
14020 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
14021 IEM_MC_ENDIF();
14022
14023 IEM_MC_ADVANCE_RIP();
14024 IEM_MC_END();
14025 }
14026 else
14027 {
14028 IEM_MC_BEGIN(0, 2);
14029 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
14030 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14031 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14032 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14033
14034 IEM_MC_PREPARE_FPU_USAGE();
14035 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14036 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
14037 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
14038 IEM_MC_ELSE()
14039 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
14040 IEM_MC_ENDIF();
14041
14042 IEM_MC_ADVANCE_RIP();
14043 IEM_MC_END();
14044 }
14045 return VINF_SUCCESS;
14046}
14047
14048
14049/**
14050 * Common worker for FPU instructions that work on ST0 and replace it with the
14051 * result, i.e. unary operators.
14052 *
14053 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14054 */
14055FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
14056{
14057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14058
14059 IEM_MC_BEGIN(2, 1);
14060 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14061 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14062 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14063
14064 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14065 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14066 IEM_MC_PREPARE_FPU_USAGE();
14067 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14068 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
14069 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14070 IEM_MC_ELSE()
14071 IEM_MC_FPU_STACK_UNDERFLOW(0);
14072 IEM_MC_ENDIF();
14073 IEM_MC_ADVANCE_RIP();
14074
14075 IEM_MC_END();
14076 return VINF_SUCCESS;
14077}
14078
14079
14080/** Opcode 0xd9 0xe0. */
14081FNIEMOP_DEF(iemOp_fchs)
14082{
14083 IEMOP_MNEMONIC("fchs st0");
14084 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
14085}
14086
14087
14088/** Opcode 0xd9 0xe1. */
14089FNIEMOP_DEF(iemOp_fabs)
14090{
14091 IEMOP_MNEMONIC("fabs st0");
14092 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
14093}
14094
14095
14096/**
14097 * Common worker for FPU instructions that work on ST0 and only return FSW.
14098 *
14099 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14100 */
14101FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
14102{
14103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14104
14105 IEM_MC_BEGIN(2, 1);
14106 IEM_MC_LOCAL(uint16_t, u16Fsw);
14107 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14108 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14109
14110 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14111 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14112 IEM_MC_PREPARE_FPU_USAGE();
14113 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14114 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
14115 IEM_MC_UPDATE_FSW(u16Fsw);
14116 IEM_MC_ELSE()
14117 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
14118 IEM_MC_ENDIF();
14119 IEM_MC_ADVANCE_RIP();
14120
14121 IEM_MC_END();
14122 return VINF_SUCCESS;
14123}
14124
14125
14126/** Opcode 0xd9 0xe4. */
14127FNIEMOP_DEF(iemOp_ftst)
14128{
14129 IEMOP_MNEMONIC("ftst st0");
14130 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
14131}
14132
14133
14134/** Opcode 0xd9 0xe5. */
14135FNIEMOP_DEF(iemOp_fxam)
14136{
14137 IEMOP_MNEMONIC("fxam st0");
14138 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
14139}
14140
14141
14142/**
14143 * Common worker for FPU instructions pushing a constant onto the FPU stack.
14144 *
14145 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14146 */
14147FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
14148{
14149 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14150
14151 IEM_MC_BEGIN(1, 1);
14152 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14153 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14154
14155 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14156 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14157 IEM_MC_PREPARE_FPU_USAGE();
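/* A push wraps TOP downwards, so the constant lands in the register that is
   currently st(7); if that slot is occupied this is a stack overflow. */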
14158 IEM_MC_IF_FPUREG_IS_EMPTY(7)
14159 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
14160 IEM_MC_PUSH_FPU_RESULT(FpuRes);
14161 IEM_MC_ELSE()
14162 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
14163 IEM_MC_ENDIF();
14164 IEM_MC_ADVANCE_RIP();
14165
14166 IEM_MC_END();
14167 return VINF_SUCCESS;
14168}
14169
14170
14171/** Opcode 0xd9 0xe8. */
14172FNIEMOP_DEF(iemOp_fld1)
14173{
14174 IEMOP_MNEMONIC("fld1");
14175 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
14176}
14177
14178
14179/** Opcode 0xd9 0xe9. */
14180FNIEMOP_DEF(iemOp_fldl2t)
14181{
14182 IEMOP_MNEMONIC("fldl2t");
14183 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
14184}
14185
14186
14187/** Opcode 0xd9 0xea. */
14188FNIEMOP_DEF(iemOp_fldl2e)
14189{
14190 IEMOP_MNEMONIC("fldl2e");
14191 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
14192}
14193
14194/** Opcode 0xd9 0xeb. */
14195FNIEMOP_DEF(iemOp_fldpi)
14196{
14197 IEMOP_MNEMONIC("fldpi");
14198 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
14199}
14200
14201
14202/** Opcode 0xd9 0xec. */
14203FNIEMOP_DEF(iemOp_fldlg2)
14204{
14205 IEMOP_MNEMONIC("fldlg2");
14206 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
14207}
14208
14209/** Opcode 0xd9 0xed. */
14210FNIEMOP_DEF(iemOp_fldln2)
14211{
14212 IEMOP_MNEMONIC("fldln2");
14213 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
14214}
14215
14216
14217/** Opcode 0xd9 0xee. */
14218FNIEMOP_DEF(iemOp_fldz)
14219{
14220 IEMOP_MNEMONIC("fldz");
14221 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
14222}
14223
14224
14225/** Opcode 0xd9 0xf0. */
14226FNIEMOP_DEF(iemOp_f2xm1)
14227{
14228 IEMOP_MNEMONIC("f2xm1 st0");
14229 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
14230}
14231
14232
14233/** Opcode 0xd9 0xf1. */
14234 FNIEMOP_DEF(iemOp_fyl2x)
14235 {
14236 IEMOP_MNEMONIC("fyl2x st0");
14237 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fyl2x_r80);
14238}
14239
14240
14241/**
14242 * Common worker for FPU instructions working on ST0 and having two outputs, one
14243 * replacing ST0 and one pushed onto the stack.
14244 *
14245 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14246 */
14247FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
14248{
14249 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14250
14251 IEM_MC_BEGIN(2, 1);
14252 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
14253 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
14254 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14255
14256 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14257 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14258 IEM_MC_PREPARE_FPU_USAGE();
14259 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14260 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
14261 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
14262 IEM_MC_ELSE()
14263 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
14264 IEM_MC_ENDIF();
14265 IEM_MC_ADVANCE_RIP();
14266
14267 IEM_MC_END();
14268 return VINF_SUCCESS;
14269}
14270
14271
14272/** Opcode 0xd9 0xf2. */
14273FNIEMOP_DEF(iemOp_fptan)
14274{
14275 IEMOP_MNEMONIC("fptan st0");
14276 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
14277}
14278
14279
14280/**
14281 * Common worker for FPU instructions working on STn and ST0, storing the result
14282 * in STn, and popping the stack unless IE, DE or ZE was raised.
14283 *
14284 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14285 */
14286FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
14287{
14288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14289
14290 IEM_MC_BEGIN(3, 1);
14291 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14292 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14293 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14294 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14295
14296 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14297 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14298
14299 IEM_MC_PREPARE_FPU_USAGE();
14300 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
14301 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
14302 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
14303 IEM_MC_ELSE()
14304 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
14305 IEM_MC_ENDIF();
14306 IEM_MC_ADVANCE_RIP();
14307
14308 IEM_MC_END();
14309 return VINF_SUCCESS;
14310}
14311
14312
14313/** Opcode 0xd9 0xf3. */
14314FNIEMOP_DEF(iemOp_fpatan)
14315{
14316 IEMOP_MNEMONIC("fpatan st1,st0");
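/* Passing 1 as the bRm argument makes the common worker operate on st(1). */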
14317 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
14318}
14319
14320
14321/** Opcode 0xd9 0xf4. */
14322FNIEMOP_DEF(iemOp_fxtract)
14323{
14324 IEMOP_MNEMONIC("fxtract st0");
14325 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
14326}
14327
14328
14329/** Opcode 0xd9 0xf5. */
14330FNIEMOP_DEF(iemOp_fprem1)
14331{
14332 IEMOP_MNEMONIC("fprem1 st0, st1");
14333 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
14334}
14335
14336
14337/** Opcode 0xd9 0xf6. */
14338FNIEMOP_DEF(iemOp_fdecstp)
14339{
14340 IEMOP_MNEMONIC("fdecstp");
14341 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14342 /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
14343 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
14344 * FINCSTP and FDECSTP. */
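/* FDECSTP only rotates the TOP field; it checks no tags and moves no data. */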
14345
14346 IEM_MC_BEGIN(0,0);
14347
14348 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14349 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14350
14351 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14352 IEM_MC_FPU_STACK_DEC_TOP();
14353 IEM_MC_UPDATE_FSW_CONST(0);
14354
14355 IEM_MC_ADVANCE_RIP();
14356 IEM_MC_END();
14357 return VINF_SUCCESS;
14358}
14359
14360
14361/** Opcode 0xd9 0xf7. */
14362FNIEMOP_DEF(iemOp_fincstp)
14363{
14364 IEMOP_MNEMONIC("fincstp");
14365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14366 /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
14367 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
14368 * FINCSTP and FDECSTP. */
14369
14370 IEM_MC_BEGIN(0,0);
14371
14372 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14373 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14374
14375 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14376 IEM_MC_FPU_STACK_INC_TOP();
14377 IEM_MC_UPDATE_FSW_CONST(0);
14378
14379 IEM_MC_ADVANCE_RIP();
14380 IEM_MC_END();
14381 return VINF_SUCCESS;
14382}
14383
14384
14385/** Opcode 0xd9 0xf8. */
14386FNIEMOP_DEF(iemOp_fprem)
14387{
14388 IEMOP_MNEMONIC("fprem st0, st1");
14389 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
14390}
14391
14392
14393/** Opcode 0xd9 0xf9. */
14394FNIEMOP_DEF(iemOp_fyl2xp1)
14395{
14396 IEMOP_MNEMONIC("fyl2xp1 st1,st0");
14397 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
14398}
14399
14400
14401/** Opcode 0xd9 0xfa. */
14402FNIEMOP_DEF(iemOp_fsqrt)
14403{
14404 IEMOP_MNEMONIC("fsqrt st0");
14405 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
14406}
14407
14408
14409/** Opcode 0xd9 0xfb. */
14410FNIEMOP_DEF(iemOp_fsincos)
14411{
14412 IEMOP_MNEMONIC("fsincos st0");
14413 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
14414}
14415
14416
14417/** Opcode 0xd9 0xfc. */
14418FNIEMOP_DEF(iemOp_frndint)
14419{
14420 IEMOP_MNEMONIC("frndint st0");
14421 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
14422}
14423
14424
14425/** Opcode 0xd9 0xfd. */
14426FNIEMOP_DEF(iemOp_fscale)
14427{
14428 IEMOP_MNEMONIC("fscale st0, st1");
14429 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
14430}
14431
14432
14433/** Opcode 0xd9 0xfe. */
14434FNIEMOP_DEF(iemOp_fsin)
14435{
14436 IEMOP_MNEMONIC("fsin st0");
14437 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
14438}
14439
14440
14441/** Opcode 0xd9 0xff. */
14442FNIEMOP_DEF(iemOp_fcos)
14443{
14444 IEMOP_MNEMONIC("fcos st0");
14445 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
14446}
14447
14448
14449/** Used by iemOp_EscF1. */
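/* Dispatch table for the register forms D9 E0 through D9 FF, indexed by the
   ModR/M byte minus 0xe0. */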
14450IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
14451{
14452 /* 0xe0 */ iemOp_fchs,
14453 /* 0xe1 */ iemOp_fabs,
14454 /* 0xe2 */ iemOp_Invalid,
14455 /* 0xe3 */ iemOp_Invalid,
14456 /* 0xe4 */ iemOp_ftst,
14457 /* 0xe5 */ iemOp_fxam,
14458 /* 0xe6 */ iemOp_Invalid,
14459 /* 0xe7 */ iemOp_Invalid,
14460 /* 0xe8 */ iemOp_fld1,
14461 /* 0xe9 */ iemOp_fldl2t,
14462 /* 0xea */ iemOp_fldl2e,
14463 /* 0xeb */ iemOp_fldpi,
14464 /* 0xec */ iemOp_fldlg2,
14465 /* 0xed */ iemOp_fldln2,
14466 /* 0xee */ iemOp_fldz,
14467 /* 0xef */ iemOp_Invalid,
14468 /* 0xf0 */ iemOp_f2xm1,
14469 /* 0xf1 */ iemOp_fyl2x,
14470 /* 0xf2 */ iemOp_fptan,
14471 /* 0xf3 */ iemOp_fpatan,
14472 /* 0xf4 */ iemOp_fxtract,
14473 /* 0xf5 */ iemOp_fprem1,
14474 /* 0xf6 */ iemOp_fdecstp,
14475 /* 0xf7 */ iemOp_fincstp,
14476 /* 0xf8 */ iemOp_fprem,
14477 /* 0xf9 */ iemOp_fyl2xp1,
14478 /* 0xfa */ iemOp_fsqrt,
14479 /* 0xfb */ iemOp_fsincos,
14480 /* 0xfc */ iemOp_frndint,
14481 /* 0xfd */ iemOp_fscale,
14482 /* 0xfe */ iemOp_fsin,
14483 /* 0xff */ iemOp_fcos
14484};
14485
14486
14487/** Opcode 0xd9. */
14488FNIEMOP_DEF(iemOp_EscF1)
14489{
14490 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
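/* Remember the FPU opcode (FOP) for FNSTENV & friends: the ModR/M byte goes
   into the low byte and the low three bits of the escape byte into the high
   byte, e.g. 'fchs' (D9 E0) yields RT_MAKE_U16(0xe0, 0xd9 & 0x7) = 0x1e0. */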
14491 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
14492
14493 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14494 {
14495 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14496 {
14497 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
14498 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
14499 case 2:
14500 if (bRm == 0xd0)
14501 return FNIEMOP_CALL(iemOp_fnop);
14502 return IEMOP_RAISE_INVALID_OPCODE();
14503 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
14504 case 4:
14505 case 5:
14506 case 6:
14507 case 7:
14508 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
14509 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
14510 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14511 }
14512 }
14513 else
14514 {
14515 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14516 {
14517 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
14518 case 1: return IEMOP_RAISE_INVALID_OPCODE();
14519 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
14520 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
14521 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
14522 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
14523 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
14524 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
14525 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14526 }
14527 }
14528}
14529
14530
14531/** Opcode 0xda 11/0. */
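/* The 0xda FCMOVcc forms conditionally copy ST(i) to ST(0) based on EFLAGS;
   FCMOVB tests CF=1 (below).  An empty source or destination register is
   handled as stack underflow. */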
14532FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
14533{
14534 IEMOP_MNEMONIC("fcmovb st0,stN");
14535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14536
14537 IEM_MC_BEGIN(0, 1);
14538 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14539
14540 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14541 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14542
14543 IEM_MC_PREPARE_FPU_USAGE();
14544 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14545 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
14546 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14547 IEM_MC_ENDIF();
14548 IEM_MC_UPDATE_FPU_OPCODE_IP();
14549 IEM_MC_ELSE()
14550 IEM_MC_FPU_STACK_UNDERFLOW(0);
14551 IEM_MC_ENDIF();
14552 IEM_MC_ADVANCE_RIP();
14553
14554 IEM_MC_END();
14555 return VINF_SUCCESS;
14556}
14557
14558
14559/** Opcode 0xda 11/1. */
14560FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
14561{
14562 IEMOP_MNEMONIC("fcmove st0,stN");
14563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14564
14565 IEM_MC_BEGIN(0, 1);
14566 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14567
14568 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14569 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14570
14571 IEM_MC_PREPARE_FPU_USAGE();
14572 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14573 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
14574 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14575 IEM_MC_ENDIF();
14576 IEM_MC_UPDATE_FPU_OPCODE_IP();
14577 IEM_MC_ELSE()
14578 IEM_MC_FPU_STACK_UNDERFLOW(0);
14579 IEM_MC_ENDIF();
14580 IEM_MC_ADVANCE_RIP();
14581
14582 IEM_MC_END();
14583 return VINF_SUCCESS;
14584}
14585
14586
14587/** Opcode 0xda 11/2. */
14588FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
14589{
14590 IEMOP_MNEMONIC("fcmovbe st0,stN");
14591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14592
14593 IEM_MC_BEGIN(0, 1);
14594 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14595
14596 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14597 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14598
14599 IEM_MC_PREPARE_FPU_USAGE();
14600 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14601 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
14602 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14603 IEM_MC_ENDIF();
14604 IEM_MC_UPDATE_FPU_OPCODE_IP();
14605 IEM_MC_ELSE()
14606 IEM_MC_FPU_STACK_UNDERFLOW(0);
14607 IEM_MC_ENDIF();
14608 IEM_MC_ADVANCE_RIP();
14609
14610 IEM_MC_END();
14611 return VINF_SUCCESS;
14612}
14613
14614
14615/** Opcode 0xda 11/3. */
14616FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
14617{
14618 IEMOP_MNEMONIC("fcmovu st0,stN");
14619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14620
14621 IEM_MC_BEGIN(0, 1);
14622 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14623
14624 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14625 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14626
14627 IEM_MC_PREPARE_FPU_USAGE();
14628 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14629 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
14630 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14631 IEM_MC_ENDIF();
14632 IEM_MC_UPDATE_FPU_OPCODE_IP();
14633 IEM_MC_ELSE()
14634 IEM_MC_FPU_STACK_UNDERFLOW(0);
14635 IEM_MC_ENDIF();
14636 IEM_MC_ADVANCE_RIP();
14637
14638 IEM_MC_END();
14639 return VINF_SUCCESS;
14640}
14641
14642
14643/**
14644 * Common worker for FPU instructions working on ST0 and STn, only affecting
14645 * flags, and popping twice when done.
14646 *
14647 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14648 */
14649FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14650{
14651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14652
14653 IEM_MC_BEGIN(3, 1);
14654 IEM_MC_LOCAL(uint16_t, u16Fsw);
14655 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14656 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14657 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14658
14659 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14660 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14661
14662 IEM_MC_PREPARE_FPU_USAGE();
14663 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
14664 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14665 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
14666 IEM_MC_ELSE()
14667 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
14668 IEM_MC_ENDIF();
14669 IEM_MC_ADVANCE_RIP();
14670
14671 IEM_MC_END();
14672 return VINF_SUCCESS;
14673}
14674
14675
14676/** Opcode 0xda 0xe9. */
14677FNIEMOP_DEF(iemOp_fucompp)
14678{
14679 IEMOP_MNEMONIC("fucompp st0,stN");
14680 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
14681}
14682
14683
14684/**
14685 * Common worker for FPU instructions working on ST0 and an m32i, and storing
14686 * the result in ST0.
14687 *
14688 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14689 */
14690FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
14691{
14692 IEM_MC_BEGIN(3, 3);
14693 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14694 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14695 IEM_MC_LOCAL(int32_t, i32Val2);
14696 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14697 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14698 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
14699
14700 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14702
14703 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14704 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14705 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14706
14707 IEM_MC_PREPARE_FPU_USAGE();
14708 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14709 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
14710 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14711 IEM_MC_ELSE()
14712 IEM_MC_FPU_STACK_UNDERFLOW(0);
14713 IEM_MC_ENDIF();
14714 IEM_MC_ADVANCE_RIP();
14715
14716 IEM_MC_END();
14717 return VINF_SUCCESS;
14718}
14719
14720
14721/** Opcode 0xda !11/0. */
14722FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
14723{
14724 IEMOP_MNEMONIC("fiadd m32i");
14725 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
14726}
14727
14728
14729/** Opcode 0xda !11/1. */
14730FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
14731{
14732 IEMOP_MNEMONIC("fimul m32i");
14733 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
14734}
14735
14736
14737/** Opcode 0xda !11/2. */
14738FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
14739{
14740 IEMOP_MNEMONIC("ficom st0,m32i");
14741
14742 IEM_MC_BEGIN(3, 3);
14743 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14744 IEM_MC_LOCAL(uint16_t, u16Fsw);
14745 IEM_MC_LOCAL(int32_t, i32Val2);
14746 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14747 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14748 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
14749
14750 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14751 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14752
14753 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14754 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14755 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14756
14757 IEM_MC_PREPARE_FPU_USAGE();
14758 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14759 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
14760 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14761 IEM_MC_ELSE()
14762 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14763 IEM_MC_ENDIF();
14764 IEM_MC_ADVANCE_RIP();
14765
14766 IEM_MC_END();
14767 return VINF_SUCCESS;
14768}
14769
14770
14771/** Opcode 0xda !11/3. */
14772FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
14773{
14774 IEMOP_MNEMONIC("ficomp st0,m32i");
14775
14776 IEM_MC_BEGIN(3, 3);
14777 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14778 IEM_MC_LOCAL(uint16_t, u16Fsw);
14779 IEM_MC_LOCAL(int32_t, i32Val2);
14780 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14781 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14782 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
14783
14784 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14786
14787 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14788 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14789 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14790
14791 IEM_MC_PREPARE_FPU_USAGE();
14792 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14793 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
14794 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14795 IEM_MC_ELSE()
14796 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14797 IEM_MC_ENDIF();
14798 IEM_MC_ADVANCE_RIP();
14799
14800 IEM_MC_END();
14801 return VINF_SUCCESS;
14802}
14803
14804
14805/** Opcode 0xda !11/4. */
14806FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
14807{
14808 IEMOP_MNEMONIC("fisub m32i");
14809 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
14810}
14811
14812
14813/** Opcode 0xda !11/5. */
14814FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
14815{
14816 IEMOP_MNEMONIC("fisubr m32i");
14817 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
14818}
14819
14820
14821/** Opcode 0xda !11/6. */
14822FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
14823{
14824 IEMOP_MNEMONIC("fidiv m32i");
14825 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
14826}
14827
14828
14829/** Opcode 0xda !11/7. */
14830FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
14831{
14832 IEMOP_MNEMONIC("fidivr m32i");
14833 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
14834}
14835
14836
14837/** Opcode 0xda. */
14838FNIEMOP_DEF(iemOp_EscF2)
14839{
14840 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14841 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
14842 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14843 {
14844 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14845 {
14846 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
14847 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
14848 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
14849 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
14850 case 4: return IEMOP_RAISE_INVALID_OPCODE();
14851 case 5:
14852 if (bRm == 0xe9)
14853 return FNIEMOP_CALL(iemOp_fucompp);
14854 return IEMOP_RAISE_INVALID_OPCODE();
14855 case 6: return IEMOP_RAISE_INVALID_OPCODE();
14856 case 7: return IEMOP_RAISE_INVALID_OPCODE();
14857 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14858 }
14859 }
14860 else
14861 {
14862 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14863 {
14864 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
14865 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
14866 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
14867 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
14868 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
14869 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
14870 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
14871 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
14872 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14873 }
14874 }
14875}
14876
14877
14878/** Opcode 0xdb !11/0. */
14879FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
14880{
14881 IEMOP_MNEMONIC("fild m32i");
14882
14883 IEM_MC_BEGIN(2, 3);
14884 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14885 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14886 IEM_MC_LOCAL(int32_t, i32Val);
14887 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14888 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);
14889
14890 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14891 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14892
14893 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14894 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14895 IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14896
14897 IEM_MC_PREPARE_FPU_USAGE();
14898 IEM_MC_IF_FPUREG_IS_EMPTY(7)
14899 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
14900 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14901 IEM_MC_ELSE()
14902 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14903 IEM_MC_ENDIF();
14904 IEM_MC_ADVANCE_RIP();
14905
14906 IEM_MC_END();
14907 return VINF_SUCCESS;
14908}
14909
14910
14911/** Opcode 0xdb !11/1. */
14912FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
14913{
14914 IEMOP_MNEMONIC("fisttp m32i");
14915 IEM_MC_BEGIN(3, 2);
14916 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14917 IEM_MC_LOCAL(uint16_t, u16Fsw);
14918 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14919 IEM_MC_ARG(int32_t *, pi32Dst, 1);
14920 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14921
14922 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14924 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14925 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14926
14927 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
14928 IEM_MC_PREPARE_FPU_USAGE();
14929 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14930 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
14931 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14932 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14933 IEM_MC_ELSE()
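/* ST(0) is empty: with the invalid-operation exception masked (FCW.IM) the
   integer-indefinite value is stored; the underflow is then recorded and the
   register popped. */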
14934 IEM_MC_IF_FCW_IM()
14935 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
14936 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
14937 IEM_MC_ENDIF();
14938 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14939 IEM_MC_ENDIF();
14940 IEM_MC_ADVANCE_RIP();
14941
14942 IEM_MC_END();
14943 return VINF_SUCCESS;
14944}
14945
14946
14947/** Opcode 0xdb !11/2. */
14948FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
14949{
14950 IEMOP_MNEMONIC("fist m32i");
14951 IEM_MC_BEGIN(3, 2);
14952 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14953 IEM_MC_LOCAL(uint16_t, u16Fsw);
14954 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14955 IEM_MC_ARG(int32_t *, pi32Dst, 1);
14956 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14957
14958 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14959 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14960 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14961 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14962
14963 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
14964 IEM_MC_PREPARE_FPU_USAGE();
14965 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14966 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
14967 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14968 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14969 IEM_MC_ELSE()
14970 IEM_MC_IF_FCW_IM()
14971 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
14972 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
14973 IEM_MC_ENDIF();
14974 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14975 IEM_MC_ENDIF();
14976 IEM_MC_ADVANCE_RIP();
14977
14978 IEM_MC_END();
14979 return VINF_SUCCESS;
14980}
14981
14982
14983/** Opcode 0xdb !11/3. */
14984FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
14985{
14986 IEMOP_MNEMONIC("fistp m32i");
14987 IEM_MC_BEGIN(3, 2);
14988 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14989 IEM_MC_LOCAL(uint16_t, u16Fsw);
14990 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14991 IEM_MC_ARG(int32_t *, pi32Dst, 1);
14992 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14993
14994 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14996 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14997 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14998
14999 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15000 IEM_MC_PREPARE_FPU_USAGE();
15001 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15002 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
15003 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
15004 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15005 IEM_MC_ELSE()
15006 IEM_MC_IF_FCW_IM()
15007 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
15008 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
15009 IEM_MC_ENDIF();
15010 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15011 IEM_MC_ENDIF();
15012 IEM_MC_ADVANCE_RIP();
15013
15014 IEM_MC_END();
15015 return VINF_SUCCESS;
15016}
15017
15018
15019/** Opcode 0xdb !11/5. */
15020FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
15021{
15022 IEMOP_MNEMONIC("fld m80r");
15023
15024 IEM_MC_BEGIN(2, 3);
15025 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15026 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15027 IEM_MC_LOCAL(RTFLOAT80U, r80Val);
15028 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15029 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);
15030
15031 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15033
15034 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15035 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15036 IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15037
15038 IEM_MC_PREPARE_FPU_USAGE();
15039 IEM_MC_IF_FPUREG_IS_EMPTY(7)
15040 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
15041 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15042 IEM_MC_ELSE()
15043 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15044 IEM_MC_ENDIF();
15045 IEM_MC_ADVANCE_RIP();
15046
15047 IEM_MC_END();
15048 return VINF_SUCCESS;
15049}
15050
15051
15052/** Opcode 0xdb !11/7. */
15053FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
15054{
15055 IEMOP_MNEMONIC("fstp m80r");
15056 IEM_MC_BEGIN(3, 2);
15057 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15058 IEM_MC_LOCAL(uint16_t, u16Fsw);
15059 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15060 IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
15061 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15062
15063 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15065 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15066 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15067
15068 IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15069 IEM_MC_PREPARE_FPU_USAGE();
15070 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15071 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
15072 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
15073 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15074 IEM_MC_ELSE()
15075 IEM_MC_IF_FCW_IM()
15076 IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
15077 IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
15078 IEM_MC_ENDIF();
15079 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15080 IEM_MC_ENDIF();
15081 IEM_MC_ADVANCE_RIP();
15082
15083 IEM_MC_END();
15084 return VINF_SUCCESS;
15085}
15086
15087
15088/** Opcode 0xdb 11/0. */
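/* The 0xdb FCMOVcc forms mirror the 0xda ones with the conditions inverted;
   FCMOVNB copies ST(i) to ST(0) when CF=0 (not below). */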
15089FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
15090{
15091 IEMOP_MNEMONIC("fcmovnb st0,stN");
15092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15093
15094 IEM_MC_BEGIN(0, 1);
15095 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15096
15097 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15098 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15099
15100 IEM_MC_PREPARE_FPU_USAGE();
15101 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15102 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
15103 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15104 IEM_MC_ENDIF();
15105 IEM_MC_UPDATE_FPU_OPCODE_IP();
15106 IEM_MC_ELSE()
15107 IEM_MC_FPU_STACK_UNDERFLOW(0);
15108 IEM_MC_ENDIF();
15109 IEM_MC_ADVANCE_RIP();
15110
15111 IEM_MC_END();
15112 return VINF_SUCCESS;
15113}
15114
15115
15116/** Opcode 0xdb 11/1. */
15117FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
15118{
15119 IEMOP_MNEMONIC("fcmovne st0,stN");
15120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15121
15122 IEM_MC_BEGIN(0, 1);
15123 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15124
15125 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15126 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15127
15128 IEM_MC_PREPARE_FPU_USAGE();
15129 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15130 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
15131 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15132 IEM_MC_ENDIF();
15133 IEM_MC_UPDATE_FPU_OPCODE_IP();
15134 IEM_MC_ELSE()
15135 IEM_MC_FPU_STACK_UNDERFLOW(0);
15136 IEM_MC_ENDIF();
15137 IEM_MC_ADVANCE_RIP();
15138
15139 IEM_MC_END();
15140 return VINF_SUCCESS;
15141}
15142
15143
15144/** Opcode 0xdb 11/2. */
15145FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
15146{
15147 IEMOP_MNEMONIC("fcmovnbe st0,stN");
15148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15149
15150 IEM_MC_BEGIN(0, 1);
15151 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15152
15153 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15154 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15155
15156 IEM_MC_PREPARE_FPU_USAGE();
15157 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15158 IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
15159 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15160 IEM_MC_ENDIF();
15161 IEM_MC_UPDATE_FPU_OPCODE_IP();
15162 IEM_MC_ELSE()
15163 IEM_MC_FPU_STACK_UNDERFLOW(0);
15164 IEM_MC_ENDIF();
15165 IEM_MC_ADVANCE_RIP();
15166
15167 IEM_MC_END();
15168 return VINF_SUCCESS;
15169}
15170
15171
15172/** Opcode 0xdb 11/3. */
15173 FNIEMOP_DEF_1(iemOp_fcmovnu_stN, uint8_t, bRm)
15174 {
15175 IEMOP_MNEMONIC("fcmovnu st0,stN");
15176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15177
15178 IEM_MC_BEGIN(0, 1);
15179 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15180
15181 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15182 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15183
15184 IEM_MC_PREPARE_FPU_USAGE();
15185 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15186 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
15187 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15188 IEM_MC_ENDIF();
15189 IEM_MC_UPDATE_FPU_OPCODE_IP();
15190 IEM_MC_ELSE()
15191 IEM_MC_FPU_STACK_UNDERFLOW(0);
15192 IEM_MC_ENDIF();
15193 IEM_MC_ADVANCE_RIP();
15194
15195 IEM_MC_END();
15196 return VINF_SUCCESS;
15197}
15198
15199
15200/** Opcode 0xdb 0xe0. */
15201FNIEMOP_DEF(iemOp_fneni)
15202{
15203 IEMOP_MNEMONIC("fneni (8087/ign)");
15204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15205 IEM_MC_BEGIN(0,0);
15206 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15207 IEM_MC_ADVANCE_RIP();
15208 IEM_MC_END();
15209 return VINF_SUCCESS;
15210}
15211
15212
15213/** Opcode 0xdb 0xe1. */
15214FNIEMOP_DEF(iemOp_fndisi)
15215{
15216 IEMOP_MNEMONIC("fndisi (8087/ign)");
15217 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15218 IEM_MC_BEGIN(0,0);
15219 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15220 IEM_MC_ADVANCE_RIP();
15221 IEM_MC_END();
15222 return VINF_SUCCESS;
15223}
15224
15225
15226/** Opcode 0xdb 0xe2. */
15227FNIEMOP_DEF(iemOp_fnclex)
15228{
15229 IEMOP_MNEMONIC("fnclex");
15230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15231
15232 IEM_MC_BEGIN(0,0);
15233 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15234 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15235 IEM_MC_CLEAR_FSW_EX();
15236 IEM_MC_ADVANCE_RIP();
15237 IEM_MC_END();
15238 return VINF_SUCCESS;
15239}
15240
15241
15242/** Opcode 0xdb 0xe3. */
15243FNIEMOP_DEF(iemOp_fninit)
15244{
15245 IEMOP_MNEMONIC("fninit");
15246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15247 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
15248}
15249
15250
15251/** Opcode 0xdb 0xe4. */
15252FNIEMOP_DEF(iemOp_fnsetpm)
15253{
15254 IEMOP_MNEMONIC("fnsetpm (80287/ign)"); /* set protected mode on fpu. */
15255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15256 IEM_MC_BEGIN(0,0);
15257 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15258 IEM_MC_ADVANCE_RIP();
15259 IEM_MC_END();
15260 return VINF_SUCCESS;
15261}
15262
15263
15264/** Opcode 0xdb 0xe5. */
15265FNIEMOP_DEF(iemOp_frstpm)
15266{
15267 IEMOP_MNEMONIC("frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
15268#if 0 /* #UDs on newer CPUs */
15269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15270 IEM_MC_BEGIN(0,0);
15271 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15272 IEM_MC_ADVANCE_RIP();
15273 IEM_MC_END();
15274 return VINF_SUCCESS;
15275#else
15276 return IEMOP_RAISE_INVALID_OPCODE();
15277#endif
15278}
15279
15280
15281/** Opcode 0xdb 11/5. */
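/* FUCOMI differs from FCOMI (below) only in that unordered compares against
   QNaN operands do not signal the invalid-operation exception; SNaNs still do. */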
15282FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
15283{
15284 IEMOP_MNEMONIC("fucomi st0,stN");
15285 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
15286}
15287
15288
15289/** Opcode 0xdb 11/6. */
15290FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
15291{
15292 IEMOP_MNEMONIC("fcomi st0,stN");
15293 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
15294}
15295
15296
15297/** Opcode 0xdb. */
15298FNIEMOP_DEF(iemOp_EscF3)
15299{
15300 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15301 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
15302 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15303 {
15304 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15305 {
15306 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
15307 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
15308 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
15309 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnu_stN, bRm);
15310 case 4:
15311 switch (bRm)
15312 {
15313 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
15314 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
15315 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
15316 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
15317 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
15318 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
15319 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
15320 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
15321 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15322 }
15323 break;
15324 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
15325 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
15326 case 7: return IEMOP_RAISE_INVALID_OPCODE();
15327 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15328 }
15329 }
15330 else
15331 {
15332 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15333 {
15334 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
15335 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i, bRm);
15336 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
15337 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
15338 case 4: return IEMOP_RAISE_INVALID_OPCODE();
15339 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
15340 case 6: return IEMOP_RAISE_INVALID_OPCODE();
15341 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
15342 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15343 }
15344 }
15345}
15346
15347
15348/**
15349 * Common worker for FPU instructions working on STn and ST0, and storing the
15350 * result in STn unless IE, DE or ZE was raised.
15351 *
15352 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15353 */
15354FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
15355{
15356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15357
15358 IEM_MC_BEGIN(3, 1);
15359 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15360 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15361 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15362 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
15363
15364 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15365 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15366
15367 IEM_MC_PREPARE_FPU_USAGE();
15368 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
15369 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
15370 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
15371 IEM_MC_ELSE()
15372 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
15373 IEM_MC_ENDIF();
15374 IEM_MC_ADVANCE_RIP();
15375
15376 IEM_MC_END();
15377 return VINF_SUCCESS;
15378}
15379
15380
15381/** Opcode 0xdc 11/0. */
15382FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
15383{
15384 IEMOP_MNEMONIC("fadd stN,st0");
15385 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
15386}
15387
15388
15389/** Opcode 0xdc 11/1. */
15390FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
15391{
15392 IEMOP_MNEMONIC("fmul stN,st0");
15393 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
15394}
15395
15396
15397/** Opcode 0xdc 11/4. */
15398FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
15399{
15400 IEMOP_MNEMONIC("fsubr stN,st0");
15401 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
15402}
15403
15404
15405/** Opcode 0xdc 11/5. */
15406FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
15407{
15408 IEMOP_MNEMONIC("fsub stN,st0");
15409 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
15410}
15411
15412
15413/** Opcode 0xdc 11/6. */
15414FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
15415{
15416 IEMOP_MNEMONIC("fdivr stN,st0");
15417 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
15418}
15419
15420
15421/** Opcode 0xdc 11/7. */
15422FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
15423{
15424 IEMOP_MNEMONIC("fdiv stN,st0");
15425 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
15426}
15427
15428
15429/**
15430 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
15431 * memory operand, and storing the result in ST0.
15432 *
15433 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15434 */
15435 FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnAImpl)
15436{
15437 IEM_MC_BEGIN(3, 3);
15438 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15439 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15440 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
15441 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15442 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
15443 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
15444
15445 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15447 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15448 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15449
15450 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15451 IEM_MC_PREPARE_FPU_USAGE();
15452 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
15453 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Factor1, pr64Factor2);
15454 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15455 IEM_MC_ELSE()
15456 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15457 IEM_MC_ENDIF();
15458 IEM_MC_ADVANCE_RIP();
15459
15460 IEM_MC_END();
15461 return VINF_SUCCESS;
15462}
15463
15464
15465/** Opcode 0xdc !11/0. */
15466FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
15467{
15468 IEMOP_MNEMONIC("fadd m64r");
15469 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
15470}
15471
15472
15473/** Opcode 0xdc !11/1. */
15474FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
15475{
15476 IEMOP_MNEMONIC("fmul m64r");
15477 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
15478}
15479
15480
15481/** Opcode 0xdc !11/2. */
15482FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
15483{
15484 IEMOP_MNEMONIC("fcom st0,m64r");
15485
15486 IEM_MC_BEGIN(3, 3);
15487 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15488 IEM_MC_LOCAL(uint16_t, u16Fsw);
15489 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
15490 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15491 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15492 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
15493
15494 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15496
15497 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15498 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15499 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15500
15501 IEM_MC_PREPARE_FPU_USAGE();
15502 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15503 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
15504 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15505 IEM_MC_ELSE()
15506 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15507 IEM_MC_ENDIF();
15508 IEM_MC_ADVANCE_RIP();
15509
15510 IEM_MC_END();
15511 return VINF_SUCCESS;
15512}
15513
15514
15515/** Opcode 0xdc !11/3. */
15516FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
15517{
15518 IEMOP_MNEMONIC("fcomp st0,m64r");
15519
15520 IEM_MC_BEGIN(3, 3);
15521 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15522 IEM_MC_LOCAL(uint16_t, u16Fsw);
15523 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
15524 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15525 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15526 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
15527
15528 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15530
15531 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15532 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15533 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15534
15535 IEM_MC_PREPARE_FPU_USAGE();
15536 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15537 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
15538 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15539 IEM_MC_ELSE()
15540 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15541 IEM_MC_ENDIF();
15542 IEM_MC_ADVANCE_RIP();
15543
15544 IEM_MC_END();
15545 return VINF_SUCCESS;
15546}
15547
15548
15549/** Opcode 0xdc !11/4. */
15550FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
15551{
15552 IEMOP_MNEMONIC("fsub m64r");
15553 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
15554}
15555
15556
15557/** Opcode 0xdc !11/5. */
15558FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
15559{
15560 IEMOP_MNEMONIC("fsubr m64r");
15561 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
15562}
15563
15564
15565/** Opcode 0xdc !11/6. */
15566FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
15567{
15568 IEMOP_MNEMONIC("fdiv m64r");
15569 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
15570}
15571
15572
15573/** Opcode 0xdc !11/7. */
15574FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
15575{
15576 IEMOP_MNEMONIC("fdivr m64r");
15577 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
15578}
15579
15580
15581/** Opcode 0xdc. */
15582FNIEMOP_DEF(iemOp_EscF4)
15583{
15584 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15585 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
15586 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15587 {
15588 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15589 {
15590 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
15591 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
15592 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, Intel behavior is that of FCOM ST(i). */
15593 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, Intel behavior is that of FCOMP ST(i). */
15594 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
15595 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
15596 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
15597 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
15598 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15599 }
15600 }
15601 else
15602 {
15603 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15604 {
15605 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
15606 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
15607 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
15608 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
15609 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
15610 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
15611 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
15612 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
15613 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15614 }
15615 }
15616}
15617
15618
15619/** Opcode 0xdd !11/0.
15620 * @sa iemOp_fld_m32r */
15621FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
15622{
15623 IEMOP_MNEMONIC("fld m64r");
15624
15625 IEM_MC_BEGIN(2, 3);
15626 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15627 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15628 IEM_MC_LOCAL(RTFLOAT64U, r64Val);
15629 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15630 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);
15631
15632 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15634 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15635 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15636
15637 IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15638 IEM_MC_PREPARE_FPU_USAGE();
15639 IEM_MC_IF_FPUREG_IS_EMPTY(7)
15640 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
15641 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15642 IEM_MC_ELSE()
15643 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15644 IEM_MC_ENDIF();
15645 IEM_MC_ADVANCE_RIP();
15646
15647 IEM_MC_END();
15648 return VINF_SUCCESS;
15649}
15650
15651
15652 /** Opcode 0xdd !11/1. */
15653FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
15654{
15655 IEMOP_MNEMONIC("fisttp m64i");
15656 IEM_MC_BEGIN(3, 2);
15657 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15658 IEM_MC_LOCAL(uint16_t, u16Fsw);
15659 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15660 IEM_MC_ARG(int64_t *, pi64Dst, 1);
15661 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15662
15663 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15664 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15665 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15666 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15667
15668 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15669 IEM_MC_PREPARE_FPU_USAGE();
15670 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15671 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
15672 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
15673 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15674 IEM_MC_ELSE()
15675 IEM_MC_IF_FCW_IM()
15676 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
15677 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
15678 IEM_MC_ENDIF();
15679 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15680 IEM_MC_ENDIF();
15681 IEM_MC_ADVANCE_RIP();
15682
15683 IEM_MC_END();
15684 return VINF_SUCCESS;
15685}
15686
15687
15688/** Opcode 0xdd !11/2. */
15689FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
15690{
15691 IEMOP_MNEMONIC("fst m64r");
15692 IEM_MC_BEGIN(3, 2);
15693 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15694 IEM_MC_LOCAL(uint16_t, u16Fsw);
15695 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15696 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
15697 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15698
15699 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15700 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15701 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15702 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15703
15704 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15705 IEM_MC_PREPARE_FPU_USAGE();
15706 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15707 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
15708 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
15709 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15710 IEM_MC_ELSE()
15711 IEM_MC_IF_FCW_IM()
15712 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
15713 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
15714 IEM_MC_ENDIF();
15715 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15716 IEM_MC_ENDIF();
15717 IEM_MC_ADVANCE_RIP();
15718
15719 IEM_MC_END();
15720 return VINF_SUCCESS;
15721}
15722
15723
15726/** Opcode 0xdd !11/3. */
15727FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
15728{
15729 IEMOP_MNEMONIC("fstp m64r");
15730 IEM_MC_BEGIN(3, 2);
15731 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15732 IEM_MC_LOCAL(uint16_t, u16Fsw);
15733 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15734 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
15735 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15736
15737 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15739 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15740 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15741
15742 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15743 IEM_MC_PREPARE_FPU_USAGE();
15744 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15745 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
15746 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
15747 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15748 IEM_MC_ELSE()
15749 IEM_MC_IF_FCW_IM()
15750 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
15751 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
15752 IEM_MC_ENDIF();
15753 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15754 IEM_MC_ENDIF();
15755 IEM_MC_ADVANCE_RIP();
15756
15757 IEM_MC_END();
15758 return VINF_SUCCESS;
15759}
15760
15761
15762/** Opcode 0xdd !11/4. */
15763FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
15764{
15765 IEMOP_MNEMONIC("frstor m94/108byte");
15766 IEM_MC_BEGIN(3, 0);
15767 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
15768 IEM_MC_ARG(uint8_t, iEffSeg, 1);
15769 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
15770 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15772 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15773 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15774 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
15775 IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
15776 IEM_MC_END();
15777 return VINF_SUCCESS;
15778}
15779
15780
15781/** Opcode 0xdd !11/6. */
15782FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
15783{
15784 IEMOP_MNEMONIC("fnsave m94/108byte");
15785 IEM_MC_BEGIN(3, 0);
15786 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
15787 IEM_MC_ARG(uint8_t, iEffSeg, 1);
15788 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
15789 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15791 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15792 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
15793 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
15794 IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
15795 IEM_MC_END();
15796 return VINF_SUCCESS;
15798}
15799

15800/** Opcode 0xdd !11/7. */
15801FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
15802{
15803 IEMOP_MNEMONIC("fnstsw m16");
15804
15805 IEM_MC_BEGIN(0, 2);
15806 IEM_MC_LOCAL(uint16_t, u16Tmp);
15807 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15808
15809 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15811 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15812
15813 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
15814 IEM_MC_FETCH_FSW(u16Tmp);
15815 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
15816 IEM_MC_ADVANCE_RIP();
15817
15818/** @todo Debug / drop a hint to the verifier that things may differ
15819 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
15820 * NT4SP1. (X86_FSW_PE) */
15821 IEM_MC_END();
15822 return VINF_SUCCESS;
15823}
15824
15825
15826/** Opcode 0xdd 11/0. */
15827FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
15828{
15829 IEMOP_MNEMONIC("ffree stN");
15830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15831 /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
15832 unmodified. */
15833
15834 IEM_MC_BEGIN(0, 0);
15835
15836 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15837 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15838
15839 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15840 IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
15841 IEM_MC_UPDATE_FPU_OPCODE_IP();
15842
15843 IEM_MC_ADVANCE_RIP();
15844 IEM_MC_END();
15845 return VINF_SUCCESS;
15846}
15847
15848
15849/** Opcode 0xdd 11/2. */
15850FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
15851{
15852 IEMOP_MNEMONIC("fst st0,stN");
15853 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15854
15855 IEM_MC_BEGIN(0, 2);
15856 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
15857 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15858 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15859 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15860
15861 IEM_MC_PREPARE_FPU_USAGE();
15862 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15863 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
15864 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
15865 IEM_MC_ELSE()
15866 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
15867 IEM_MC_ENDIF();
15868
15869 IEM_MC_ADVANCE_RIP();
15870 IEM_MC_END();
15871 return VINF_SUCCESS;
15872}
15873
15874
15875/** Opcode 0xdd 11/4. */
15876FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
15877{
15878 IEMOP_MNEMONIC("fucom st0,stN");
15879 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
15880}
15881
15882
15883/** Opcode 0xdd 11/5. */
15884FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
15885{
15886 IEMOP_MNEMONIC("fucomp st0,stN");
15887 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
15888}
15889
15890
15891/** Opcode 0xdd. */
15892FNIEMOP_DEF(iemOp_EscF5)
15893{
15894 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
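    /* Record the FPU opcode (FOP): the low three bits of the escape byte go in
       the high byte, the ModR/M byte in the low byte. */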
15895 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
15896 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15897 {
15898 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15899 {
15900 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
15901 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
15902 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
15903 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
15904 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
15905 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
15906 case 6: return IEMOP_RAISE_INVALID_OPCODE();
15907 case 7: return IEMOP_RAISE_INVALID_OPCODE();
15908 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15909 }
15910 }
15911 else
15912 {
15913 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15914 {
15915 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
15916 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
15917 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
15918 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
15919 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
15920 case 5: return IEMOP_RAISE_INVALID_OPCODE();
15921 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
15922 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
15923 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15924 }
15925 }
15926}
15927
15928
15929/** Opcode 0xde 11/0. */
15930FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
15931{
15932 IEMOP_MNEMONIC("faddp stN,st0");
15933 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
15934}
15935
15936
15937/** Opcode 0xde 11/1. */
15938FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
15939{
15940 IEMOP_MNEMONIC("fmulp stN,st0");
15941 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
15942}
15943
15944
15945/** Opcode 0xde 0xd9. */
15946FNIEMOP_DEF(iemOp_fcompp)
15947{
15948 IEMOP_MNEMONIC("fcompp st0,stN");
15949 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
15950}
15951
15952
15953/** Opcode 0xde 11/4. */
15954FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
15955{
15956 IEMOP_MNEMONIC("fsubrp stN,st0");
15957 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
15958}
15959
15960
15961/** Opcode 0xde 11/5. */
15962FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
15963{
15964 IEMOP_MNEMONIC("fsubp stN,st0");
15965 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
15966}
15967
15968
15969/** Opcode 0xde 11/6. */
15970FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
15971{
15972 IEMOP_MNEMONIC("fdivrp stN,st0");
15973 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
15974}
15975
15976
15977/** Opcode 0xde 11/7. */
15978FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
15979{
15980 IEMOP_MNEMONIC("fdivp stN,st0");
15981 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
15982}
15983
15984
15985/**
15986 * Common worker for FPU instructions working on ST0 and an m16i, and storing
15987 * the result in ST0.
15988 *
15989 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15990 */
15991FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
15992{
15993 IEM_MC_BEGIN(3, 3);
15994 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15995 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15996 IEM_MC_LOCAL(int16_t, i16Val2);
15997 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15998 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15999 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16000
16001 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16003
16004 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16005 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16006 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16007
16008 IEM_MC_PREPARE_FPU_USAGE();
16009 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16010 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
16011 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
16012 IEM_MC_ELSE()
16013 IEM_MC_FPU_STACK_UNDERFLOW(0);
16014 IEM_MC_ENDIF();
16015 IEM_MC_ADVANCE_RIP();
16016
16017 IEM_MC_END();
16018 return VINF_SUCCESS;
16019}
16020
16021
16022/** Opcode 0xde !11/0. */
16023FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
16024{
16025 IEMOP_MNEMONIC("fiadd m16i");
16026 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
16027}
16028
16029
16030/** Opcode 0xde !11/1. */
16031FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
16032{
16033 IEMOP_MNEMONIC("fimul m16i");
16034 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
16035}
16036
16037
16038/** Opcode 0xde !11/2. */
16039FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
16040{
16041 IEMOP_MNEMONIC("ficom st0,m16i");
16042
16043 IEM_MC_BEGIN(3, 3);
16044 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16045 IEM_MC_LOCAL(uint16_t, u16Fsw);
16046 IEM_MC_LOCAL(int16_t, i16Val2);
16047 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16048 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16049 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16050
16051 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16053
16054 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16055 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16056 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16057
16058 IEM_MC_PREPARE_FPU_USAGE();
16059 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16060 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
16061 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16062 IEM_MC_ELSE()
16063 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16064 IEM_MC_ENDIF();
16065 IEM_MC_ADVANCE_RIP();
16066
16067 IEM_MC_END();
16068 return VINF_SUCCESS;
16069}
16070
16071
16072/** Opcode 0xde !11/3. */
16073FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
16074{
16075 IEMOP_MNEMONIC("ficomp st0,m16i");
16076
16077 IEM_MC_BEGIN(3, 3);
16078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16079 IEM_MC_LOCAL(uint16_t, u16Fsw);
16080 IEM_MC_LOCAL(int16_t, i16Val2);
16081 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16082 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16083 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16084
16085 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16087
16088 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16089 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16090 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16091
16092 IEM_MC_PREPARE_FPU_USAGE();
16093 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16094 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
16095 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16096 IEM_MC_ELSE()
16097 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16098 IEM_MC_ENDIF();
16099 IEM_MC_ADVANCE_RIP();
16100
16101 IEM_MC_END();
16102 return VINF_SUCCESS;
16103}
16104
16105
16106/** Opcode 0xde !11/4. */
16107FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
16108{
16109 IEMOP_MNEMONIC("fisub m16i");
16110 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
16111}
16112
16113
16114/** Opcode 0xde !11/5. */
16115FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
16116{
16117 IEMOP_MNEMONIC("fisubr m16i");
16118 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
16119}
16120
16121
16122/** Opcode 0xde !11/6. */
16123FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
16124{
16125 IEMOP_MNEMONIC("fidiv m16i");
16126 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
16127}
16128
16129
16130/** Opcode 0xde !11/7. */
16131FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
16132{
16133 IEMOP_MNEMONIC("fidivr m16i");
16134 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
16135}
16136
16137
16138/** Opcode 0xde. */
16139FNIEMOP_DEF(iemOp_EscF6)
16140{
16141 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16142 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
16143 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16144 {
16145 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16146 {
16147 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
16148 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
16149 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
16150 case 3: if (bRm == 0xd9)
16151 return FNIEMOP_CALL(iemOp_fcompp);
16152 return IEMOP_RAISE_INVALID_OPCODE();
16153 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
16154 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
16155 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
16156 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
16157 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16158 }
16159 }
16160 else
16161 {
16162 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16163 {
16164 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
16165 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
16166 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
16167 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
16168 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
16169 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
16170 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
16171 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
16172 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16173 }
16174 }
16175}
16176
16177
16178/** Opcode 0xdf 11/0.
16179 * Undocumented instruction, assumed to work like ffree + fincstp. */
16180FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
16181{
16182 IEMOP_MNEMONIC("ffreep stN");
16183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16184
16185 IEM_MC_BEGIN(0, 0);
16186
16187 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16188 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16189
16190 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
16191 IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
16192 IEM_MC_FPU_STACK_INC_TOP();
16193 IEM_MC_UPDATE_FPU_OPCODE_IP();
16194
16195 IEM_MC_ADVANCE_RIP();
16196 IEM_MC_END();
16197 return VINF_SUCCESS;
16198}
16199
16200
16201/** Opcode 0xdf 0xe0. */
16202FNIEMOP_DEF(iemOp_fnstsw_ax)
16203{
16204 IEMOP_MNEMONIC("fnstsw ax");
16205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16206
16207 IEM_MC_BEGIN(0, 1);
16208 IEM_MC_LOCAL(uint16_t, u16Tmp);
16209 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
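    /* Note: no IEM_MC_MAYBE_RAISE_FPU_XCPT here - the no-wait FNSTSW must be
       usable while an unmasked FPU exception is pending. */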
16210 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
16211 IEM_MC_FETCH_FSW(u16Tmp);
16212 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
16213 IEM_MC_ADVANCE_RIP();
16214 IEM_MC_END();
16215 return VINF_SUCCESS;
16216}
16217
16218
16219/** Opcode 0xdf 11/5. */
16220FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
16221{
16222 IEMOP_MNEMONIC("fucomip st0,stN");
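    /** @todo FUCOMIP differs from FCOMIP in that QNaN operands do not raise
     *        \#IA; sharing iemAImpl_fcomi_r80_by_r80 with fcomip below papers
     *        over that distinction. */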
16223 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
16224}
16225
16226
16227/** Opcode 0xdf 11/6. */
16228FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
16229{
16230 IEMOP_MNEMONIC("fcomip st0,stN");
16231 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
16232}
16233
16234
16235/** Opcode 0xdf !11/0. */
16236FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
16237{
16238 IEMOP_MNEMONIC("fild m16i");
16239
16240 IEM_MC_BEGIN(2, 3);
16241 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16242 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16243 IEM_MC_LOCAL(int16_t, i16Val);
16244 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16245 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);
16246
16247 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16248 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16249
16250 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16251 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16252 IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16253
16254 IEM_MC_PREPARE_FPU_USAGE();
16255 IEM_MC_IF_FPUREG_IS_EMPTY(7)
16256 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
16257 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16258 IEM_MC_ELSE()
16259 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16260 IEM_MC_ENDIF();
16261 IEM_MC_ADVANCE_RIP();
16262
16263 IEM_MC_END();
16264 return VINF_SUCCESS;
16265}
16266
16267
16268/** Opcode 0xdf !11/1. */
16269FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
16270{
16271 IEMOP_MNEMONIC("fisttp m16i");
16272 IEM_MC_BEGIN(3, 2);
16273 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16274 IEM_MC_LOCAL(uint16_t, u16Fsw);
16275 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16276 IEM_MC_ARG(int16_t *, pi16Dst, 1);
16277 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16278
16279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16281 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16282 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16283
16284 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16285 IEM_MC_PREPARE_FPU_USAGE();
16286 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16287 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
16288 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
16289 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16290 IEM_MC_ELSE()
16291 IEM_MC_IF_FCW_IM()
16292 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
16293 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
16294 IEM_MC_ENDIF();
16295 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16296 IEM_MC_ENDIF();
16297 IEM_MC_ADVANCE_RIP();
16298
16299 IEM_MC_END();
16300 return VINF_SUCCESS;
16301}
16302
16303
16304/** Opcode 0xdf !11/2. */
16305FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
16306{
16307 IEMOP_MNEMONIC("fist m16i");
16308 IEM_MC_BEGIN(3, 2);
16309 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16310 IEM_MC_LOCAL(uint16_t, u16Fsw);
16311 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16312 IEM_MC_ARG(int16_t *, pi16Dst, 1);
16313 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16314
16315 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16317 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16318 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16319
16320 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16321 IEM_MC_PREPARE_FPU_USAGE();
16322 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16323 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
16324 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
16325 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16326 IEM_MC_ELSE()
16327 IEM_MC_IF_FCW_IM()
16328 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
16329 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
16330 IEM_MC_ENDIF();
16331 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16332 IEM_MC_ENDIF();
16333 IEM_MC_ADVANCE_RIP();
16334
16335 IEM_MC_END();
16336 return VINF_SUCCESS;
16337}
16338
16339
16340/** Opcode 0xdf !11/3. */
16341FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
16342{
16343 IEMOP_MNEMONIC("fistp m16i");
16344 IEM_MC_BEGIN(3, 2);
16345 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16346 IEM_MC_LOCAL(uint16_t, u16Fsw);
16347 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16348 IEM_MC_ARG(int16_t *, pi16Dst, 1);
16349 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16350
16351 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16353 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16354 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16355
16356 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16357 IEM_MC_PREPARE_FPU_USAGE();
16358 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16359 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
16360 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
16361 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16362 IEM_MC_ELSE()
16363 IEM_MC_IF_FCW_IM()
16364 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
16365 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
16366 IEM_MC_ENDIF();
16367 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16368 IEM_MC_ENDIF();
16369 IEM_MC_ADVANCE_RIP();
16370
16371 IEM_MC_END();
16372 return VINF_SUCCESS;
16373}
16374
16375
16376/** Opcode 0xdf !11/4. */
16377FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
16378
16379
16380/** Opcode 0xdf !11/5. */
16381FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
16382{
16383 IEMOP_MNEMONIC("fild m64i");
16384
16385 IEM_MC_BEGIN(2, 3);
16386 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16387 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16388 IEM_MC_LOCAL(int64_t, i64Val);
16389 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16390 IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);
16391
16392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16394
16395 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16396 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16397 IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16398
16399 IEM_MC_PREPARE_FPU_USAGE();
16400 IEM_MC_IF_FPUREG_IS_EMPTY(7)
16401 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
16402 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16403 IEM_MC_ELSE()
16404 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16405 IEM_MC_ENDIF();
16406 IEM_MC_ADVANCE_RIP();
16407
16408 IEM_MC_END();
16409 return VINF_SUCCESS;
16410}
16411
16412
16413/** Opcode 0xdf !11/6. */
16414FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
16415
16416
16417/** Opcode 0xdf !11/7. */
16418FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
16419{
16420 IEMOP_MNEMONIC("fistp m64i");
16421 IEM_MC_BEGIN(3, 2);
16422 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16423 IEM_MC_LOCAL(uint16_t, u16Fsw);
16424 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16425 IEM_MC_ARG(int64_t *, pi64Dst, 1);
16426 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16427
16428 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16430 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16431 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16432
16433 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16434 IEM_MC_PREPARE_FPU_USAGE();
16435 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16436 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
16437 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
16438 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16439 IEM_MC_ELSE()
16440 IEM_MC_IF_FCW_IM()
16441 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
16442 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
16443 IEM_MC_ENDIF();
16444 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16445 IEM_MC_ENDIF();
16446 IEM_MC_ADVANCE_RIP();
16447
16448 IEM_MC_END();
16449 return VINF_SUCCESS;
16450}
16451
16452
16453/** Opcode 0xdf. */
16454FNIEMOP_DEF(iemOp_EscF7)
16455{
16456 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
16457 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16458 {
16459 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16460 {
16461 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
16462 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
16463 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
16464 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
16465 case 4: if (bRm == 0xe0)
16466 return FNIEMOP_CALL(iemOp_fnstsw_ax);
16467 return IEMOP_RAISE_INVALID_OPCODE();
16468 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
16469 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
16470 case 7: return IEMOP_RAISE_INVALID_OPCODE();
16471 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16472 }
16473 }
16474 else
16475 {
16476 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16477 {
16478 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
16479 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
16480 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
16481 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
16482 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
16483 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
16484 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
16485 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
16486 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16487 }
16488 }
16489}
16490
16491
16492/** Opcode 0xe0. */
16493FNIEMOP_DEF(iemOp_loopne_Jb)
16494{
16495 IEMOP_MNEMONIC("loopne Jb");
16496 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
16497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16498 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16499
16500 switch (pVCpu->iem.s.enmEffAddrMode)
16501 {
16502 case IEMMODE_16BIT:
16503 IEM_MC_BEGIN(0,0);
16504 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
16505 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
16506 IEM_MC_REL_JMP_S8(i8Imm);
16507 } IEM_MC_ELSE() {
16508 IEM_MC_ADVANCE_RIP();
16509 } IEM_MC_ENDIF();
16510 IEM_MC_END();
16511 return VINF_SUCCESS;
16512
16513 case IEMMODE_32BIT:
16514 IEM_MC_BEGIN(0,0);
16515 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
16516 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
16517 IEM_MC_REL_JMP_S8(i8Imm);
16518 } IEM_MC_ELSE() {
16519 IEM_MC_ADVANCE_RIP();
16520 } IEM_MC_ENDIF();
16521 IEM_MC_END();
16522 return VINF_SUCCESS;
16523
16524 case IEMMODE_64BIT:
16525 IEM_MC_BEGIN(0,0);
16526 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
16527 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
16528 IEM_MC_REL_JMP_S8(i8Imm);
16529 } IEM_MC_ELSE() {
16530 IEM_MC_ADVANCE_RIP();
16531 } IEM_MC_ENDIF();
16532 IEM_MC_END();
16533 return VINF_SUCCESS;
16534
16535 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16536 }
16537}
16538
16539
16540/** Opcode 0xe1. */
16541FNIEMOP_DEF(iemOp_loope_Jb)
16542{
16543 IEMOP_MNEMONIC("loope Jb");
16544 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
16545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16546 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16547
16548 switch (pVCpu->iem.s.enmEffAddrMode)
16549 {
16550 case IEMMODE_16BIT:
16551 IEM_MC_BEGIN(0,0);
16552 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
16553 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
16554 IEM_MC_REL_JMP_S8(i8Imm);
16555 } IEM_MC_ELSE() {
16556 IEM_MC_ADVANCE_RIP();
16557 } IEM_MC_ENDIF();
16558 IEM_MC_END();
16559 return VINF_SUCCESS;
16560
16561 case IEMMODE_32BIT:
16562 IEM_MC_BEGIN(0,0);
16563 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
16564 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
16565 IEM_MC_REL_JMP_S8(i8Imm);
16566 } IEM_MC_ELSE() {
16567 IEM_MC_ADVANCE_RIP();
16568 } IEM_MC_ENDIF();
16569 IEM_MC_END();
16570 return VINF_SUCCESS;
16571
16572 case IEMMODE_64BIT:
16573 IEM_MC_BEGIN(0,0);
16574 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
16575 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
16576 IEM_MC_REL_JMP_S8(i8Imm);
16577 } IEM_MC_ELSE() {
16578 IEM_MC_ADVANCE_RIP();
16579 } IEM_MC_ENDIF();
16580 IEM_MC_END();
16581 return VINF_SUCCESS;
16582
16583 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16584 }
16585}
16586
16587
16588/** Opcode 0xe2. */
16589FNIEMOP_DEF(iemOp_loop_Jb)
16590{
16591 IEMOP_MNEMONIC("loop Jb");
16592 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
16593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16594 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16595
16596 /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
16597 * using the 32-bit operand size override. How can that be restarted? See
16598 * weird pseudo code in intel manual. */
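    /* Each case below special-cases the branch-to-self form ('loop $', i.e.
       i8Imm == -instruction-length): such a loop merely counts CX down to
       zero, so we store zero and fall through instead of emulating every
       single iteration. */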
16599 switch (pVCpu->iem.s.enmEffAddrMode)
16600 {
16601 case IEMMODE_16BIT:
16602 IEM_MC_BEGIN(0,0);
16603 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
16604 {
16605 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
16606 IEM_MC_IF_CX_IS_NZ() {
16607 IEM_MC_REL_JMP_S8(i8Imm);
16608 } IEM_MC_ELSE() {
16609 IEM_MC_ADVANCE_RIP();
16610 } IEM_MC_ENDIF();
16611 }
16612 else
16613 {
16614 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
16615 IEM_MC_ADVANCE_RIP();
16616 }
16617 IEM_MC_END();
16618 return VINF_SUCCESS;
16619
16620 case IEMMODE_32BIT:
16621 IEM_MC_BEGIN(0,0);
16622 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
16623 {
16624 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
16625 IEM_MC_IF_ECX_IS_NZ() {
16626 IEM_MC_REL_JMP_S8(i8Imm);
16627 } IEM_MC_ELSE() {
16628 IEM_MC_ADVANCE_RIP();
16629 } IEM_MC_ENDIF();
16630 }
16631 else
16632 {
16633 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
16634 IEM_MC_ADVANCE_RIP();
16635 }
16636 IEM_MC_END();
16637 return VINF_SUCCESS;
16638
16639 case IEMMODE_64BIT:
16640 IEM_MC_BEGIN(0,0);
16641 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
16642 {
16643 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
16644 IEM_MC_IF_RCX_IS_NZ() {
16645 IEM_MC_REL_JMP_S8(i8Imm);
16646 } IEM_MC_ELSE() {
16647 IEM_MC_ADVANCE_RIP();
16648 } IEM_MC_ENDIF();
16649 }
16650 else
16651 {
16652 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
16653 IEM_MC_ADVANCE_RIP();
16654 }
16655 IEM_MC_END();
16656 return VINF_SUCCESS;
16657
16658 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16659 }
16660}
16661
16662
16663/** Opcode 0xe3. */
16664FNIEMOP_DEF(iemOp_jecxz_Jb)
16665{
16666 IEMOP_MNEMONIC("jecxz Jb");
16667 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
16668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16669 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16670
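    /* The counter that is tested (CX/ECX/RCX - i.e. whether this decodes as
       jcxz, jecxz or jrcxz) is selected by the effective address size. */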
16671 switch (pVCpu->iem.s.enmEffAddrMode)
16672 {
16673 case IEMMODE_16BIT:
16674 IEM_MC_BEGIN(0,0);
16675 IEM_MC_IF_CX_IS_NZ() {
16676 IEM_MC_ADVANCE_RIP();
16677 } IEM_MC_ELSE() {
16678 IEM_MC_REL_JMP_S8(i8Imm);
16679 } IEM_MC_ENDIF();
16680 IEM_MC_END();
16681 return VINF_SUCCESS;
16682
16683 case IEMMODE_32BIT:
16684 IEM_MC_BEGIN(0,0);
16685 IEM_MC_IF_ECX_IS_NZ() {
16686 IEM_MC_ADVANCE_RIP();
16687 } IEM_MC_ELSE() {
16688 IEM_MC_REL_JMP_S8(i8Imm);
16689 } IEM_MC_ENDIF();
16690 IEM_MC_END();
16691 return VINF_SUCCESS;
16692
16693 case IEMMODE_64BIT:
16694 IEM_MC_BEGIN(0,0);
16695 IEM_MC_IF_RCX_IS_NZ() {
16696 IEM_MC_ADVANCE_RIP();
16697 } IEM_MC_ELSE() {
16698 IEM_MC_REL_JMP_S8(i8Imm);
16699 } IEM_MC_ENDIF();
16700 IEM_MC_END();
16701 return VINF_SUCCESS;
16702
16703 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16704 }
16705}
16706
16707
16708/** Opcode 0xe4 */
16709FNIEMOP_DEF(iemOp_in_AL_Ib)
16710{
16711 IEMOP_MNEMONIC("in AL,Ib");
16712 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16714 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
16715}
16716
16717
16718/** Opcode 0xe5 */
16719FNIEMOP_DEF(iemOp_in_eAX_Ib)
16720{
16721 IEMOP_MNEMONIC("in eAX,Ib");
16722 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
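    /* The last argument is the access width in bytes: AX for 16-bit operand
       size, EAX otherwise. */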
16724 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
16725}
16726
16727
16728/** Opcode 0xe6 */
16729FNIEMOP_DEF(iemOp_out_Ib_AL)
16730{
16731 IEMOP_MNEMONIC("out Ib,AL");
16732 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16734 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
16735}
16736
16737
16738/** Opcode 0xe7 */
16739FNIEMOP_DEF(iemOp_out_Ib_eAX)
16740{
16741 IEMOP_MNEMONIC("out Ib,eAX");
16742 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16744 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
16745}
16746
16747
16748/** Opcode 0xe8. */
16749FNIEMOP_DEF(iemOp_call_Jv)
16750{
16751 IEMOP_MNEMONIC("call Jv");
16752 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16753 switch (pVCpu->iem.s.enmEffOpSize)
16754 {
16755 case IEMMODE_16BIT:
16756 {
16757 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
16758 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
16759 }
16760
16761 case IEMMODE_32BIT:
16762 {
16763 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
16764 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
16765 }
16766
16767 case IEMMODE_64BIT:
16768 {
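            /* In 64-bit mode the near-call displacement is a sign-extended
               32-bit immediate. */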
16769 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
16770 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
16771 }
16772
16773 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16774 }
16775}
16776
16777
16778/** Opcode 0xe9. */
16779FNIEMOP_DEF(iemOp_jmp_Jv)
16780{
16781 IEMOP_MNEMONIC("jmp Jv");
16782 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16783 switch (pVCpu->iem.s.enmEffOpSize)
16784 {
16785 case IEMMODE_16BIT:
16786 {
16787 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
16788 IEM_MC_BEGIN(0, 0);
16789 IEM_MC_REL_JMP_S16(i16Imm);
16790 IEM_MC_END();
16791 return VINF_SUCCESS;
16792 }
16793
16794 case IEMMODE_64BIT:
16795 case IEMMODE_32BIT:
16796 {
16797 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
16798 IEM_MC_BEGIN(0, 0);
16799 IEM_MC_REL_JMP_S32(i32Imm);
16800 IEM_MC_END();
16801 return VINF_SUCCESS;
16802 }
16803
16804 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16805 }
16806}
16807
16808
16809/** Opcode 0xea. */
16810FNIEMOP_DEF(iemOp_jmp_Ap)
16811{
16812 IEMOP_MNEMONIC("jmp Ap");
16813 IEMOP_HLP_NO_64BIT();
16814
16815 /* Decode the far pointer address and pass it on to the far call C implementation. */
16816 uint32_t offSeg;
16817 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
16818 IEM_OPCODE_GET_NEXT_U32(&offSeg);
16819 else
16820 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
16821 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
16822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16823 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
16824}
16825
16826
16827/** Opcode 0xeb. */
16828FNIEMOP_DEF(iemOp_jmp_Jb)
16829{
16830 IEMOP_MNEMONIC("jmp Jb");
16831 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
16832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16833 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16834
16835 IEM_MC_BEGIN(0, 0);
16836 IEM_MC_REL_JMP_S8(i8Imm);
16837 IEM_MC_END();
16838 return VINF_SUCCESS;
16839}
16840
16841
16842/** Opcode 0xec */
16843FNIEMOP_DEF(iemOp_in_AL_DX)
16844{
16845 IEMOP_MNEMONIC("in AL,DX");
16846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16847 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
16848}
16849
16850
16851/** Opcode 0xed */
16852FNIEMOP_DEF(iemOp_eAX_DX)
16853{
16854 IEMOP_MNEMONIC("in eAX,DX");
16855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16856 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
16857}
16858
16859
16860/** Opcode 0xee */
16861FNIEMOP_DEF(iemOp_out_DX_AL)
16862{
16863 IEMOP_MNEMONIC("out DX,AL");
16864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16865 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
16866}
16867
16868
16869/** Opcode 0xef */
16870FNIEMOP_DEF(iemOp_out_DX_eAX)
16871{
16872 IEMOP_MNEMONIC("out DX,eAX");
16873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16874 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
16875}
16876
16877
16878/** Opcode 0xf0. */
16879FNIEMOP_DEF(iemOp_lock)
16880{
16881 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
16882 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
16883
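    /* A prefix byte is not an instruction of its own: record it and continue
       decoding with the following opcode byte. */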
16884 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
16885 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
16886}
16887
16888
16889/** Opcode 0xf1. */
16890FNIEMOP_DEF(iemOp_int_1)
16891{
16892 IEMOP_MNEMONIC("int1"); /* icebp */
16893 IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
16894 /** @todo testcase! */
16895 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
16896}
16897
16898
16899/** Opcode 0xf2. */
16900FNIEMOP_DEF(iemOp_repne)
16901{
16902 /* This overrides any previous REPE prefix. */
16903 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
16904 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
16905 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
16906
16907 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
16908 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
16909}
16910
16911
16912/** Opcode 0xf3. */
16913FNIEMOP_DEF(iemOp_repe)
16914{
16915 /* This overrides any previous REPNE prefix. */
16916 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
16917 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
16918 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
16919
16920 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
16921 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
16922}
16923
16924
16925/** Opcode 0xf4. */
16926FNIEMOP_DEF(iemOp_hlt)
16927{
16928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16929 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
16930}
16931
16932
16933/** Opcode 0xf5. */
16934FNIEMOP_DEF(iemOp_cmc)
16935{
16936 IEMOP_MNEMONIC("cmc");
16937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16938 IEM_MC_BEGIN(0, 0);
16939 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
16940 IEM_MC_ADVANCE_RIP();
16941 IEM_MC_END();
16942 return VINF_SUCCESS;
16943}
16944
16945
16946/**
16947 * Common implementation of 'inc/dec/not/neg Eb'.
16948 *
16949 * @param bRm The RM byte.
16950 * @param pImpl The instruction implementation.
16951 */
16952FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
16953{
16954 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16955 {
16956 /* register access */
16957 IEM_MC_BEGIN(2, 0);
16958 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
16959 IEM_MC_ARG(uint32_t *, pEFlags, 1);
16960 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
16961 IEM_MC_REF_EFLAGS(pEFlags);
16962 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
16963 IEM_MC_ADVANCE_RIP();
16964 IEM_MC_END();
16965 }
16966 else
16967 {
16968 /* memory access. */
16969 IEM_MC_BEGIN(2, 2);
16970 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
16971 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
16972 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16973
16974 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16975 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
16976 IEM_MC_FETCH_EFLAGS(EFlags);
16977 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
16978 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
16979 else
16980 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
16981
16982 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
16983 IEM_MC_COMMIT_EFLAGS(EFlags);
16984 IEM_MC_ADVANCE_RIP();
16985 IEM_MC_END();
16986 }
16987 return VINF_SUCCESS;
16988}
16989
16990
16991/**
16992 * Common implementation of 'inc/dec/not/neg Ev'.
16993 *
16994 * @param bRm The RM byte.
16995 * @param pImpl The instruction implementation.
16996 */
16997FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
16998{
16999 /* Registers are handled by a common worker. */
17000 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17001 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17002
17003 /* Memory we do here. */
17004 switch (pVCpu->iem.s.enmEffOpSize)
17005 {
17006 case IEMMODE_16BIT:
17007 IEM_MC_BEGIN(2, 2);
17008 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
17009 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17010 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17011
17012 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17013 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17014 IEM_MC_FETCH_EFLAGS(EFlags);
17015 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17016 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
17017 else
17018 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
17019
17020 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
17021 IEM_MC_COMMIT_EFLAGS(EFlags);
17022 IEM_MC_ADVANCE_RIP();
17023 IEM_MC_END();
17024 return VINF_SUCCESS;
17025
17026 case IEMMODE_32BIT:
17027 IEM_MC_BEGIN(2, 2);
17028 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
17029 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17030 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17031
17032 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17033 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17034 IEM_MC_FETCH_EFLAGS(EFlags);
17035 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17036 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
17037 else
17038 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
17039
17040 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
17041 IEM_MC_COMMIT_EFLAGS(EFlags);
17042 IEM_MC_ADVANCE_RIP();
17043 IEM_MC_END();
17044 return VINF_SUCCESS;
17045
17046 case IEMMODE_64BIT:
17047 IEM_MC_BEGIN(2, 2);
17048 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
17049 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17050 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17051
17052 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17053 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17054 IEM_MC_FETCH_EFLAGS(EFlags);
17055 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17056 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
17057 else
17058 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
17059
17060 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
17061 IEM_MC_COMMIT_EFLAGS(EFlags);
17062 IEM_MC_ADVANCE_RIP();
17063 IEM_MC_END();
17064 return VINF_SUCCESS;
17065
17066 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17067 }
17068}
17069
17070
17071/** Opcode 0xf6 /0. */
17072FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
17073{
17074 IEMOP_MNEMONIC("test Eb,Ib");
17075 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
17076
17077 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17078 {
17079 /* register access */
17080 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17082
17083 IEM_MC_BEGIN(3, 0);
17084 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17085 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
17086 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17087 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17088 IEM_MC_REF_EFLAGS(pEFlags);
17089 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
17090 IEM_MC_ADVANCE_RIP();
17091 IEM_MC_END();
17092 }
17093 else
17094 {
17095 /* memory access. */
17096 IEM_MC_BEGIN(3, 2);
17097 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17098 IEM_MC_ARG(uint8_t, u8Src, 1);
17099 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
17100 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17101
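        /* One immediate byte follows the ModR/M encoding; its size is passed
           so RIP-relative addressing is calculated from the end of the
           instruction. */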
17102 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
17103 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17104 IEM_MC_ASSIGN(u8Src, u8Imm);
17105 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17106 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17107 IEM_MC_FETCH_EFLAGS(EFlags);
17108 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
17109
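        /* TEST only reads the operand, so the mapping is read-only and the
           'commit' below amounts to a plain unmap. */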
17110 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
17111 IEM_MC_COMMIT_EFLAGS(EFlags);
17112 IEM_MC_ADVANCE_RIP();
17113 IEM_MC_END();
17114 }
17115 return VINF_SUCCESS;
17116}
17117
17118
17119/** Opcode 0xf7 /0. */
17120FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
17121{
17122 IEMOP_MNEMONIC("test Ev,Iv");
17123 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
17124
17125 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17126 {
17127 /* register access */
17128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17129 switch (pVCpu->iem.s.enmEffOpSize)
17130 {
17131 case IEMMODE_16BIT:
17132 {
17133 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
17134 IEM_MC_BEGIN(3, 0);
17135 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
17136 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
17137 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17138 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17139 IEM_MC_REF_EFLAGS(pEFlags);
17140 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
17141 IEM_MC_ADVANCE_RIP();
17142 IEM_MC_END();
17143 return VINF_SUCCESS;
17144 }
17145
17146 case IEMMODE_32BIT:
17147 {
17148 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
17149 IEM_MC_BEGIN(3, 0);
17150 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
17151 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
17152 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17153 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17154 IEM_MC_REF_EFLAGS(pEFlags);
17155 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
17156 /* No clearing the high dword here - test doesn't write back the result. */
17157 IEM_MC_ADVANCE_RIP();
17158 IEM_MC_END();
17159 return VINF_SUCCESS;
17160 }
17161
17162 case IEMMODE_64BIT:
17163 {
17164 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
17165 IEM_MC_BEGIN(3, 0);
17166 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
17167 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
17168 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17169 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17170 IEM_MC_REF_EFLAGS(pEFlags);
17171 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
17172 IEM_MC_ADVANCE_RIP();
17173 IEM_MC_END();
17174 return VINF_SUCCESS;
17175 }
17176
17177 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17178 }
17179 }
17180 else
17181 {
17182 /* memory access. */
17183 switch (pVCpu->iem.s.enmEffOpSize)
17184 {
17185 case IEMMODE_16BIT:
17186 {
17187 IEM_MC_BEGIN(3, 2);
17188 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
17189 IEM_MC_ARG(uint16_t, u16Src, 1);
17190 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
17191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17192
17193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
17194 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
17195 IEM_MC_ASSIGN(u16Src, u16Imm);
17196 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17197 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17198 IEM_MC_FETCH_EFLAGS(EFlags);
17199 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
17200
17201 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
17202 IEM_MC_COMMIT_EFLAGS(EFlags);
17203 IEM_MC_ADVANCE_RIP();
17204 IEM_MC_END();
17205 return VINF_SUCCESS;
17206 }
17207
17208 case IEMMODE_32BIT:
17209 {
17210 IEM_MC_BEGIN(3, 2);
17211 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
17212 IEM_MC_ARG(uint32_t, u32Src, 1);
17213 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
17214 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17215
17216 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
17217 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
17218 IEM_MC_ASSIGN(u32Src, u32Imm);
17219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17220 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17221 IEM_MC_FETCH_EFLAGS(EFlags);
17222 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
17223
17224 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
17225 IEM_MC_COMMIT_EFLAGS(EFlags);
17226 IEM_MC_ADVANCE_RIP();
17227 IEM_MC_END();
17228 return VINF_SUCCESS;
17229 }
17230
17231 case IEMMODE_64BIT:
17232 {
17233 IEM_MC_BEGIN(3, 2);
17234 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
17235 IEM_MC_ARG(uint64_t, u64Src, 1);
17236 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
17237 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17238
17239 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
17240 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
17241 IEM_MC_ASSIGN(u64Src, u64Imm);
17242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17243 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17244 IEM_MC_FETCH_EFLAGS(EFlags);
17245 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
17246
17247 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
17248 IEM_MC_COMMIT_EFLAGS(EFlags);
17249 IEM_MC_ADVANCE_RIP();
17250 IEM_MC_END();
17251 return VINF_SUCCESS;
17252 }
17253
17254 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17255 }
17256 }
17257}
17258
17259
17260/** Opcode 0xf6 /4, /5, /6 and /7. */
17261FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
17262{
17263 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17264 {
17265 /* register access */
17266 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17267 IEM_MC_BEGIN(3, 1);
17268 IEM_MC_ARG(uint16_t *, pu16AX, 0);
17269 IEM_MC_ARG(uint8_t, u8Value, 1);
17270 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17271 IEM_MC_LOCAL(int32_t, rc);
17272
17273 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17274 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
17275 IEM_MC_REF_EFLAGS(pEFlags);
17276 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
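        /* The assembly helper returns zero on success and non-zero when a
           divide error (#DE) must be raised. */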
17277 IEM_MC_IF_LOCAL_IS_Z(rc) {
17278 IEM_MC_ADVANCE_RIP();
17279 } IEM_MC_ELSE() {
17280 IEM_MC_RAISE_DIVIDE_ERROR();
17281 } IEM_MC_ENDIF();
17282
17283 IEM_MC_END();
17284 }
17285 else
17286 {
17287 /* memory access. */
17288 IEM_MC_BEGIN(3, 2);
17289 IEM_MC_ARG(uint16_t *, pu16AX, 0);
17290 IEM_MC_ARG(uint8_t, u8Value, 1);
17291 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17292 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17293 IEM_MC_LOCAL(int32_t, rc);
17294
17295 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17297 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17298 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
17299 IEM_MC_REF_EFLAGS(pEFlags);
17300 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
17301 IEM_MC_IF_LOCAL_IS_Z(rc) {
17302 IEM_MC_ADVANCE_RIP();
17303 } IEM_MC_ELSE() {
17304 IEM_MC_RAISE_DIVIDE_ERROR();
17305 } IEM_MC_ENDIF();
17306
17307 IEM_MC_END();
17308 }
17309 return VINF_SUCCESS;
17310}
17311
17312
17313/** Opcode 0xf7 /4, /5, /6 and /7. */
17314FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
17315{
17316 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
17317
17318 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17319 {
17320 /* register access */
17321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17322 switch (pVCpu->iem.s.enmEffOpSize)
17323 {
17324 case IEMMODE_16BIT:
17325 {
17327 IEM_MC_BEGIN(4, 1);
17328 IEM_MC_ARG(uint16_t *, pu16AX, 0);
17329 IEM_MC_ARG(uint16_t *, pu16DX, 1);
17330 IEM_MC_ARG(uint16_t, u16Value, 2);
17331 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17332 IEM_MC_LOCAL(int32_t, rc);
17333
17334 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17335 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
17336 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
17337 IEM_MC_REF_EFLAGS(pEFlags);
17338 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
17339 IEM_MC_IF_LOCAL_IS_Z(rc) {
17340 IEM_MC_ADVANCE_RIP();
17341 } IEM_MC_ELSE() {
17342 IEM_MC_RAISE_DIVIDE_ERROR();
17343 } IEM_MC_ENDIF();
17344
17345 IEM_MC_END();
17346 return VINF_SUCCESS;
17347 }
17348
17349 case IEMMODE_32BIT:
17350 {
17352 IEM_MC_BEGIN(4, 1);
17353 IEM_MC_ARG(uint32_t *, pu32AX, 0);
17354 IEM_MC_ARG(uint32_t *, pu32DX, 1);
17355 IEM_MC_ARG(uint32_t, u32Value, 2);
17356 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17357 IEM_MC_LOCAL(int32_t, rc);
17358
17359 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17360 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
17361 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
17362 IEM_MC_REF_EFLAGS(pEFlags);
17363 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
17364 IEM_MC_IF_LOCAL_IS_Z(rc) {
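                    /* 32-bit register writes zero-extend into the full 64-bit register. */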
17365 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
17366 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
17367 IEM_MC_ADVANCE_RIP();
17368 } IEM_MC_ELSE() {
17369 IEM_MC_RAISE_DIVIDE_ERROR();
17370 } IEM_MC_ENDIF();
17371
17372 IEM_MC_END();
17373 return VINF_SUCCESS;
17374 }
17375
17376 case IEMMODE_64BIT:
17377 {
17379 IEM_MC_BEGIN(4, 1);
17380 IEM_MC_ARG(uint64_t *, pu64AX, 0);
17381 IEM_MC_ARG(uint64_t *, pu64DX, 1);
17382 IEM_MC_ARG(uint64_t, u64Value, 2);
17383 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17384 IEM_MC_LOCAL(int32_t, rc);
17385
17386 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17387 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
17388 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
17389 IEM_MC_REF_EFLAGS(pEFlags);
17390 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
17391 IEM_MC_IF_LOCAL_IS_Z(rc) {
17392 IEM_MC_ADVANCE_RIP();
17393 } IEM_MC_ELSE() {
17394 IEM_MC_RAISE_DIVIDE_ERROR();
17395 } IEM_MC_ENDIF();
17396
17397 IEM_MC_END();
17398 return VINF_SUCCESS;
17399 }
17400
17401 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17402 }
17403 }
17404 else
17405 {
17406 /* memory access. */
17407 switch (pVCpu->iem.s.enmEffOpSize)
17408 {
17409 case IEMMODE_16BIT:
17410 {
17411 IEM_MC_BEGIN(4, 2);
17412 IEM_MC_ARG(uint16_t *, pu16AX, 0);
17413 IEM_MC_ARG(uint16_t *, pu16DX, 1);
17414 IEM_MC_ARG(uint16_t, u16Value, 2);
17415 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17416 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17417 IEM_MC_LOCAL(int32_t, rc);
17418
17419 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17421 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17422 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
17423 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
17424 IEM_MC_REF_EFLAGS(pEFlags);
17425 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
17426 IEM_MC_IF_LOCAL_IS_Z(rc) {
17427 IEM_MC_ADVANCE_RIP();
17428 } IEM_MC_ELSE() {
17429 IEM_MC_RAISE_DIVIDE_ERROR();
17430 } IEM_MC_ENDIF();
17431
17432 IEM_MC_END();
17433 return VINF_SUCCESS;
17434 }
17435
17436 case IEMMODE_32BIT:
17437 {
17438 IEM_MC_BEGIN(4, 2);
17439 IEM_MC_ARG(uint32_t *, pu32AX, 0);
17440 IEM_MC_ARG(uint32_t *, pu32DX, 1);
17441 IEM_MC_ARG(uint32_t, u32Value, 2);
17442 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17443 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17444 IEM_MC_LOCAL(int32_t, rc);
17445
17446 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17448 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17449 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
17450 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
17451 IEM_MC_REF_EFLAGS(pEFlags);
17452 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
17453 IEM_MC_IF_LOCAL_IS_Z(rc) {
17454 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
17455 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
17456 IEM_MC_ADVANCE_RIP();
17457 } IEM_MC_ELSE() {
17458 IEM_MC_RAISE_DIVIDE_ERROR();
17459 } IEM_MC_ENDIF();
17460
17461 IEM_MC_END();
17462 return VINF_SUCCESS;
17463 }
17464
17465 case IEMMODE_64BIT:
17466 {
17467 IEM_MC_BEGIN(4, 2);
17468 IEM_MC_ARG(uint64_t *, pu64AX, 0);
17469 IEM_MC_ARG(uint64_t *, pu64DX, 1);
17470 IEM_MC_ARG(uint64_t, u64Value, 2);
17471 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17472 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17473 IEM_MC_LOCAL(int32_t, rc);
17474
17475 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17477 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17478 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
17479 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
17480 IEM_MC_REF_EFLAGS(pEFlags);
17481 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
17482 IEM_MC_IF_LOCAL_IS_Z(rc) {
17483 IEM_MC_ADVANCE_RIP();
17484 } IEM_MC_ELSE() {
17485 IEM_MC_RAISE_DIVIDE_ERROR();
17486 } IEM_MC_ENDIF();
17487
17488 IEM_MC_END();
17489 return VINF_SUCCESS;
17490 }
17491
17492 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17493 }
17494 }
17495}
17496
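/* Editor's note -- a hedged, non-building sketch: a plain-C model of the
 * contract assumed of the worker behind pImpl->pfnU32 above.  It divides
 * EDX:EAX by the operand and returns 0 on success (quotient in EAX,
 * remainder in EDX) or non-zero to take the IEM_MC_RAISE_DIVIDE_ERROR()
 * branch.  The real workers are assembly (IEMAllAImpl.asm); the name below
 * is invented for illustration only. */
#if 0
static int32_t iemAImplSketch_div_u32(uint32_t *puAX, uint32_t *puDX, uint32_t u32Divisor, uint32_t *pfEFlags)
{
    if (u32Divisor == 0)
        return -1;                                      /* #DE: divide by zero. */
    uint64_t const uDividend = ((uint64_t)*puDX << 32) | *puAX;
    uint64_t const uQuotient = uDividend / u32Divisor;
    if (uQuotient > UINT32_MAX)
        return -1;                                      /* #DE: quotient does not fit in EAX. */
    *puAX = (uint32_t)uQuotient;
    *puDX = (uint32_t)(uDividend % u32Divisor);
    (void)pfEFlags;                                     /* All arithmetic flags are undefined after DIV. */
    return 0;
}
#endif
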
17497/** Opcode 0xf6. */
17498FNIEMOP_DEF(iemOp_Grp3_Eb)
17499{
17500 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17501 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17502 {
17503 case 0:
17504 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
17505 case 1:
17506/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
17507 return IEMOP_RAISE_INVALID_OPCODE();
17508 case 2:
17509 IEMOP_MNEMONIC("not Eb");
17510 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
17511 case 3:
17512 IEMOP_MNEMONIC("neg Eb");
17513 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
17514 case 4:
17515 IEMOP_MNEMONIC("mul Eb");
17516 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
17517 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
17518 case 5:
17519 IEMOP_MNEMONIC("imul Eb");
17520 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
17521 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
17522 case 6:
17523 IEMOP_MNEMONIC("div Eb");
17524 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
17525 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
17526 case 7:
17527 IEMOP_MNEMONIC("idiv Eb");
17528 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
17529 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
17530 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17531 }
17532}
17533
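/* Hedged worked example for the dispatch above: 'div bl' encodes as F6 F3,
 * so bRm = 0xF3 and
 *     mod = (0xF3 >> 6) & 3 = 3   -> register operand
 *     reg = (0xF3 >> 3) & 7 = 6   -> /6, i.e. the DIV case
 *     rm  =  0xF3       & 7 = 3   -> BL (extended by REX.B in long mode)
 */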
17534
17535/** Opcode 0xf7. */
17536FNIEMOP_DEF(iemOp_Grp3_Ev)
17537{
17538 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17539 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17540 {
17541 case 0:
17542 return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
17543 case 1:
17544/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
17545 return IEMOP_RAISE_INVALID_OPCODE();
17546 case 2:
17547 IEMOP_MNEMONIC("not Ev");
17548 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
17549 case 3:
17550 IEMOP_MNEMONIC("neg Ev");
17551 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
17552 case 4:
17553 IEMOP_MNEMONIC("mul Ev");
17554 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
17555 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
17556 case 5:
17557 IEMOP_MNEMONIC("imul Ev");
17558 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
17559 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
17560 case 6:
17561 IEMOP_MNEMONIC("div Ev");
17562 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
17563 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
17564 case 7:
17565 IEMOP_MNEMONIC("idiv Ev");
17566 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
17567 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
17568 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17569 }
17570}
17571
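/* Hedged worked example: 0xF7 honours the effective operand size, so the
 * same ModRM byte reaches different cases of iemOpCommonGrp3MulDivEv above:
 *     F7 F3      -> div ebx   (32-bit default)
 *     66 F7 F3   -> div bx    (operand-size prefix)
 *     48 F7 F3   -> div rbx   (REX.W in long mode)
 */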
17572
17573/** Opcode 0xf8. */
17574FNIEMOP_DEF(iemOp_clc)
17575{
17576 IEMOP_MNEMONIC("clc");
17577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17578 IEM_MC_BEGIN(0, 0);
17579 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
17580 IEM_MC_ADVANCE_RIP();
17581 IEM_MC_END();
17582 return VINF_SUCCESS;
17583}
17584
17585
17586/** Opcode 0xf9. */
17587FNIEMOP_DEF(iemOp_stc)
17588{
17589 IEMOP_MNEMONIC("stc");
17590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17591 IEM_MC_BEGIN(0, 0);
17592 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
17593 IEM_MC_ADVANCE_RIP();
17594 IEM_MC_END();
17595 return VINF_SUCCESS;
17596}
17597
17598
17599/** Opcode 0xfa. */
17600FNIEMOP_DEF(iemOp_cli)
17601{
17602 IEMOP_MNEMONIC("cli");
17603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17604 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
17605}
17606
17607
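/** Opcode 0xfb. */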
17608FNIEMOP_DEF(iemOp_sti)
17609{
17610 IEMOP_MNEMONIC("sti");
17611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17612 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
17613}
17614
17615
17616/** Opcode 0xfc. */
17617FNIEMOP_DEF(iemOp_cld)
17618{
17619 IEMOP_MNEMONIC("cld");
17620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17621 IEM_MC_BEGIN(0, 0);
17622 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
17623 IEM_MC_ADVANCE_RIP();
17624 IEM_MC_END();
17625 return VINF_SUCCESS;
17626}
17627
17628
17629/** Opcode 0xfd. */
17630FNIEMOP_DEF(iemOp_std)
17631{
17632 IEMOP_MNEMONIC("std");
17633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17634 IEM_MC_BEGIN(0, 0);
17635 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
17636 IEM_MC_ADVANCE_RIP();
17637 IEM_MC_END();
17638 return VINF_SUCCESS;
17639}
17640
17641
17642/** Opcode 0xfe. */
17643FNIEMOP_DEF(iemOp_Grp4)
17644{
17645 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17646 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17647 {
17648 case 0:
17649 IEMOP_MNEMONIC("inc Ev");
17650 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
17651 case 1:
17652 IEMOP_MNEMONIC("dec Ev");
17653 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
17654 default:
17655 IEMOP_MNEMONIC("grp4-ud");
17656 return IEMOP_RAISE_INVALID_OPCODE();
17657 }
17658}
17659
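/* Hedged worked example: 0xfe defines only /0 and /1, so FE C0 decodes as
 * 'inc al' (mod=3, reg=0, rm=0) while e.g. FE F0 (reg=6) takes the #UD
 * path above. */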
17660
17661/**
17662 * Opcode 0xff /2.
17663 * @param bRm The RM byte.
17664 */
17665FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
17666{
17667 IEMOP_MNEMONIC("calln Ev");
17668 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17669
17670 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17671 {
17672 /* The new RIP is taken from a register. */
17673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17674 switch (pVCpu->iem.s.enmEffOpSize)
17675 {
17676 case IEMMODE_16BIT:
17677 IEM_MC_BEGIN(1, 0);
17678 IEM_MC_ARG(uint16_t, u16Target, 0);
17679 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17680 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
17681 IEM_MC_END()
17682 return VINF_SUCCESS;
17683
17684 case IEMMODE_32BIT:
17685 IEM_MC_BEGIN(1, 0);
17686 IEM_MC_ARG(uint32_t, u32Target, 0);
17687 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17688 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
17689 IEM_MC_END()
17690 return VINF_SUCCESS;
17691
17692 case IEMMODE_64BIT:
17693 IEM_MC_BEGIN(1, 0);
17694 IEM_MC_ARG(uint64_t, u64Target, 0);
17695 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17696 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
17697 IEM_MC_END()
17698 return VINF_SUCCESS;
17699
17700 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17701 }
17702 }
17703 else
17704 {
17705 /* The new RIP is taken from a memory location. */
17706 switch (pVCpu->iem.s.enmEffOpSize)
17707 {
17708 case IEMMODE_16BIT:
17709 IEM_MC_BEGIN(1, 1);
17710 IEM_MC_ARG(uint16_t, u16Target, 0);
17711 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17714 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17715 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
17716 IEM_MC_END()
17717 return VINF_SUCCESS;
17718
17719 case IEMMODE_32BIT:
17720 IEM_MC_BEGIN(1, 1);
17721 IEM_MC_ARG(uint32_t, u32Target, 0);
17722 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17723 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17725 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17726 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
17727 IEM_MC_END()
17728 return VINF_SUCCESS;
17729
17730 case IEMMODE_64BIT:
17731 IEM_MC_BEGIN(1, 1);
17732 IEM_MC_ARG(uint64_t, u64Target, 0);
17733 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17734 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17736 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17737 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
17738 IEM_MC_END()
17739 return VINF_SUCCESS;
17740
17741 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17742 }
17743 }
17744}
17745
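/* Editor's note -- a hedged, non-building sketch of the architectural effect
 * iemCImpl_call_64 is assumed to implement for the paths above: validate the
 * target, push the return address, branch.  Every name here is invented, the
 * guest stack is modelled as a flat byte buffer indexed by RSP, and memcpy
 * stands in for a proper guest memory write. */
#if 0
# include <string.h>
static int sketchNearCall64(uint64_t *puRip, uint64_t *puRsp, uint8_t *pbStack,
                            uint64_t uNewRip, uint8_t cbInstr)
{
    int64_t const iSext = (int64_t)uNewRip >> 47;       /* 48-bit canonical check */
    if (iSext != 0 && iSext != -1)
        return -1;                                      /* non-canonical target -> #GP(0) */
    uint64_t const uRetRip = *puRip + cbInstr;          /* address of the next instruction */
    *puRsp -= 8;                                        /* push the return address */
    memcpy(&pbStack[*puRsp], &uRetRip, sizeof(uRetRip));
    *puRip = uNewRip;                                   /* then branch */
    return 0;
}
#endif
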
17746typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
17747
17748FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
17749{
17750 /* Registers? How?? */
17751 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
17752 { /* likely */ }
17753 else
17754 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
17755
17756 /* Far pointer loaded from memory. */
17757 switch (pVCpu->iem.s.enmEffOpSize)
17758 {
17759 case IEMMODE_16BIT:
17760 IEM_MC_BEGIN(3, 1);
17761 IEM_MC_ARG(uint16_t, u16Sel, 0);
17762 IEM_MC_ARG(uint16_t, offSeg, 1);
17763 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17764 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17767 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17768 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
17769 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17770 IEM_MC_END();
17771 return VINF_SUCCESS;
17772
17773 case IEMMODE_64BIT:
17774 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
17775 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
17776 * and call far qword [rsp] encodings. */
17777 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
17778 {
17779 IEM_MC_BEGIN(3, 1);
17780 IEM_MC_ARG(uint16_t, u16Sel, 0);
17781 IEM_MC_ARG(uint64_t, offSeg, 1);
17782 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2);
17783 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17784 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17786 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17787 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
17788 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17789 IEM_MC_END();
17790 return VINF_SUCCESS;
17791 }
17792 /* AMD falls thru. */
17793
17794 case IEMMODE_32BIT:
17795 IEM_MC_BEGIN(3, 1);
17796 IEM_MC_ARG(uint16_t, u16Sel, 0);
17797 IEM_MC_ARG(uint32_t, offSeg, 1);
17798 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
17799 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17800 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17802 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17803 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
17804 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17805 IEM_MC_END();
17806 return VINF_SUCCESS;
17807
17808 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17809 }
17810}
17811
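/* Hedged worked example: with a 32-bit effective operand size the m16:32
 * operand fetched above lies little-endian in memory as
 *     +0: 32-bit offset    -> offSeg  (IEM_MC_FETCH_MEM_U32)
 *     +4: 16-bit selector  -> u16Sel  (IEM_MC_FETCH_MEM_U16_DISP, disp 4)
 * so the bytes 78 56 34 12 1B 00 describe the target 001Bh:12345678h. */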
17812
17813/**
17814 * Opcode 0xff /3.
17815 * @param bRm The RM byte.
17816 */
17817FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
17818{
17819 IEMOP_MNEMONIC("callf Ep");
17820 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
17821}
17822
17823
17824/**
17825 * Opcode 0xff /4.
17826 * @param bRm The RM byte.
17827 */
17828FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
17829{
17830 IEMOP_MNEMONIC("jmpn Ev");
17831 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17832
17833 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17834 {
17835 /* The new RIP is taken from a register. */
17836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17837 switch (pVCpu->iem.s.enmEffOpSize)
17838 {
17839 case IEMMODE_16BIT:
17840 IEM_MC_BEGIN(0, 1);
17841 IEM_MC_LOCAL(uint16_t, u16Target);
17842 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17843 IEM_MC_SET_RIP_U16(u16Target);
17844 IEM_MC_END()
17845 return VINF_SUCCESS;
17846
17847 case IEMMODE_32BIT:
17848 IEM_MC_BEGIN(0, 1);
17849 IEM_MC_LOCAL(uint32_t, u32Target);
17850 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17851 IEM_MC_SET_RIP_U32(u32Target);
17852 IEM_MC_END()
17853 return VINF_SUCCESS;
17854
17855 case IEMMODE_64BIT:
17856 IEM_MC_BEGIN(0, 1);
17857 IEM_MC_LOCAL(uint64_t, u64Target);
17858 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17859 IEM_MC_SET_RIP_U64(u64Target);
17860 IEM_MC_END()
17861 return VINF_SUCCESS;
17862
17863 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17864 }
17865 }
17866 else
17867 {
17868 /* The new RIP is taken from a memory location. */
17869 switch (pVCpu->iem.s.enmEffOpSize)
17870 {
17871 case IEMMODE_16BIT:
17872 IEM_MC_BEGIN(0, 2);
17873 IEM_MC_LOCAL(uint16_t, u16Target);
17874 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17875 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17876 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17877 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17878 IEM_MC_SET_RIP_U16(u16Target);
17879 IEM_MC_END()
17880 return VINF_SUCCESS;
17881
17882 case IEMMODE_32BIT:
17883 IEM_MC_BEGIN(0, 2);
17884 IEM_MC_LOCAL(uint32_t, u32Target);
17885 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17886 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17888 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17889 IEM_MC_SET_RIP_U32(u32Target);
17890 IEM_MC_END()
17891 return VINF_SUCCESS;
17892
17893 case IEMMODE_64BIT:
17894 IEM_MC_BEGIN(0, 2);
17895 IEM_MC_LOCAL(uint64_t, u64Target);
17896 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17897 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17899 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17900 IEM_MC_SET_RIP_U64(u64Target);
17901 IEM_MC_END()
17902 return VINF_SUCCESS;
17903
17904 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17905 }
17906 }
17907}
17908
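/* Hedged worked example: 'jmp rax' encodes as FF E0 (mod=3, reg=4, rm=0);
 * IEMOP_HLP_DEFAULT_64BIT_OP_SIZE() above makes IEMMODE_64BIT the default in
 * long mode, so no REX.W is needed for a full-width branch target. */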
17909
17910/**
17911 * Opcode 0xff /5.
17912 * @param bRm The RM byte.
17913 */
17914FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
17915{
17916 IEMOP_MNEMONIC("jmpf Ep");
17917 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
17918}
17919
17920
17921/**
17922 * Opcode 0xff /6.
17923 * @param bRm The RM byte.
17924 */
17925FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
17926{
17927 IEMOP_MNEMONIC("push Ev");
17928
17929 /* Registers are handled by a common worker. */
17930 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17931 return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17932
17933 /* Memory we do here. */
17934 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17935 switch (pVCpu->iem.s.enmEffOpSize)
17936 {
17937 case IEMMODE_16BIT:
17938 IEM_MC_BEGIN(0, 2);
17939 IEM_MC_LOCAL(uint16_t, u16Src);
17940 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17941 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17943 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17944 IEM_MC_PUSH_U16(u16Src);
17945 IEM_MC_ADVANCE_RIP();
17946 IEM_MC_END();
17947 return VINF_SUCCESS;
17948
17949 case IEMMODE_32BIT:
17950 IEM_MC_BEGIN(0, 2);
17951 IEM_MC_LOCAL(uint32_t, u32Src);
17952 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17953 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17955 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17956 IEM_MC_PUSH_U32(u32Src);
17957 IEM_MC_ADVANCE_RIP();
17958 IEM_MC_END();
17959 return VINF_SUCCESS;
17960
17961 case IEMMODE_64BIT:
17962 IEM_MC_BEGIN(0, 2);
17963 IEM_MC_LOCAL(uint64_t, u64Src);
17964 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17965 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17967 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17968 IEM_MC_PUSH_U64(u64Src);
17969 IEM_MC_ADVANCE_RIP();
17970 IEM_MC_END();
17971 return VINF_SUCCESS;
17972
17973 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17974 }
17975}
17976
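/* Hedged worked example: 'push qword [rbx]' encodes as FF 33 (mod=0, reg=6,
 * rm=3); the memory path above fetches the operand first and only then
 * pushes it onto the stack. */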
17977
17978/** Opcode 0xff. */
17979FNIEMOP_DEF(iemOp_Grp5)
17980{
17981 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17982 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17983 {
17984 case 0:
17985 IEMOP_MNEMONIC("inc Ev");
17986 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
17987 case 1:
17988 IEMOP_MNEMONIC("dec Ev");
17989 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
17990 case 2:
17991 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
17992 case 3:
17993 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
17994 case 4:
17995 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
17996 case 5:
17997 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
17998 case 6:
17999 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
18000 case 7:
18001 IEMOP_MNEMONIC("grp5-ud");
18002 return IEMOP_RAISE_INVALID_OPCODE();
18003 }
18004 AssertFailedReturn(VERR_IEM_IPE_3);
18005}
18006
18007
18008
18009const PFNIEMOP g_apfnOneByteMap[256] =
18010{
18011 /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
18012 /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
18013 /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
18014 /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
18015 /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
18016 /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
18017 /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
18018 /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
18019 /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
18020 /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
18021 /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
18022 /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
18023 /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
18024 /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
18025 /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
18026 /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
18027 /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
18028 /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
18029 /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
18030 /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
18031 /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
18032 /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
18033 /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
18034 /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
18035 /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
18036 /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
18037 /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
18038 /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
18039 /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
18040 /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
18041 /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
18042 /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
18043 /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
18044 /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
18045 /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
18046 /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
18047 /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
18048 /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
18049 /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
18050 /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
18051 /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
18052 /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
18053 /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
18054 /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
18055 /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
18056 /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
18057 /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
18058 /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
18059 /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
18060 /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
18061 /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
18062 /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
18063 /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
18064 /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
18065 /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
18066 /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
18067 /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
18068 /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
18069 /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
18070 /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
18071 /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
18072 /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
18073 /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
18074 /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
18075};
18076
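/* Editor's note -- a hedged, non-building sketch of how a decode loop is
 * expected to consume the table above: fetch one opcode byte and tail-call
 * the handler (two-byte opcodes funnel through iemOp_2byteEscape).  The
 * function name is invented; the real driver lives in IEMAll.cpp. */
#if 0
FNIEMOP_DEF(iemOpSketch_DispatchOneByte)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
#endif
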
18077
18078/** @} */
18079