VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 47173

Last change on this file since 47173 was 47138, checked in by vboxsync, 11 years ago

IEM: idiv and div missing bits and fixes.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 531.9 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 47138 2013-07-14 18:05:53Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2012 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
25/**
26 * Common worker for instructions like ADD, AND, OR, ++ with a byte
27 * memory/register as the destination.
28 *
29 * @param pImpl Pointer to the instruction implementation (assembly).
30 */
31FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
32{
33 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
34
35 /*
36 * If rm is denoting a register, no more instruction bytes.
37 */
38 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
39 {
40 IEMOP_HLP_NO_LOCK_PREFIX();
41
42 IEM_MC_BEGIN(3, 0);
43 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
44 IEM_MC_ARG(uint8_t, u8Src, 1);
45 IEM_MC_ARG(uint32_t *, pEFlags, 2);
46
47 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
48 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
49 IEM_MC_REF_EFLAGS(pEFlags);
50 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
51
52 IEM_MC_ADVANCE_RIP();
53 IEM_MC_END();
54 }
55 else
56 {
57 /*
58 * We're accessing memory.
59 * Note! We're putting the eflags on the stack here so we can commit them
60 * after the memory.
61 */
62 uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
63 IEM_MC_BEGIN(3, 2);
64 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
65 IEM_MC_ARG(uint8_t, u8Src, 1);
66 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
67 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
68
69 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
70 IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
71 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
72 IEM_MC_FETCH_EFLAGS(EFlags);
73 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
74 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
75 else
76 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
77
78 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
79 IEM_MC_COMMIT_EFLAGS(EFlags);
80 IEM_MC_ADVANCE_RIP();
81 IEM_MC_END();
82 }
83 return VINF_SUCCESS;
84}
85
86
87/**
88 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
89 * memory/register as the destination.
90 *
91 * @param pImpl Pointer to the instruction implementation (assembly).
92 */
93FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
94{
95 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
96
97 /*
98 * If rm is denoting a register, no more instruction bytes.
99 */
100 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
101 {
102 IEMOP_HLP_NO_LOCK_PREFIX();
103
104 switch (pIemCpu->enmEffOpSize)
105 {
106 case IEMMODE_16BIT:
107 IEM_MC_BEGIN(3, 0);
108 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
109 IEM_MC_ARG(uint16_t, u16Src, 1);
110 IEM_MC_ARG(uint32_t *, pEFlags, 2);
111
112 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
113 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
114 IEM_MC_REF_EFLAGS(pEFlags);
115 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
116
117 IEM_MC_ADVANCE_RIP();
118 IEM_MC_END();
119 break;
120
121 case IEMMODE_32BIT:
122 IEM_MC_BEGIN(3, 0);
123 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
124 IEM_MC_ARG(uint32_t, u32Src, 1);
125 IEM_MC_ARG(uint32_t *, pEFlags, 2);
126
127 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
128 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
129 IEM_MC_REF_EFLAGS(pEFlags);
130 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
131
132 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
133 IEM_MC_ADVANCE_RIP();
134 IEM_MC_END();
135 break;
136
137 case IEMMODE_64BIT:
138 IEM_MC_BEGIN(3, 0);
139 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
140 IEM_MC_ARG(uint64_t, u64Src, 1);
141 IEM_MC_ARG(uint32_t *, pEFlags, 2);
142
143 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
144 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
145 IEM_MC_REF_EFLAGS(pEFlags);
146 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
147
148 IEM_MC_ADVANCE_RIP();
149 IEM_MC_END();
150 break;
151 }
152 }
153 else
154 {
155 /*
156 * We're accessing memory.
157 * Note! We're putting the eflags on the stack here so we can commit them
158 * after the memory.
159 */
160 uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
161 switch (pIemCpu->enmEffOpSize)
162 {
163 case IEMMODE_16BIT:
164 IEM_MC_BEGIN(3, 2);
165 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
166 IEM_MC_ARG(uint16_t, u16Src, 1);
167 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
168 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
169
170 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
171 IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
172 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
173 IEM_MC_FETCH_EFLAGS(EFlags);
174 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
175 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
176 else
177 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
178
179 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
180 IEM_MC_COMMIT_EFLAGS(EFlags);
181 IEM_MC_ADVANCE_RIP();
182 IEM_MC_END();
183 break;
184
185 case IEMMODE_32BIT:
186 IEM_MC_BEGIN(3, 2);
187 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
188 IEM_MC_ARG(uint32_t, u32Src, 1);
189 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
190 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
191
192 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
193 IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
194 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
195 IEM_MC_FETCH_EFLAGS(EFlags);
196 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
197 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
198 else
199 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
200
201 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
202 IEM_MC_COMMIT_EFLAGS(EFlags);
203 IEM_MC_ADVANCE_RIP();
204 IEM_MC_END();
205 break;
206
207 case IEMMODE_64BIT:
208 IEM_MC_BEGIN(3, 2);
209 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
210 IEM_MC_ARG(uint64_t, u64Src, 1);
211 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
212 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
213
214 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
215 IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
216 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
217 IEM_MC_FETCH_EFLAGS(EFlags);
218 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
219 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
220 else
221 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
222
223 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
224 IEM_MC_COMMIT_EFLAGS(EFlags);
225 IEM_MC_ADVANCE_RIP();
226 IEM_MC_END();
227 break;
228 }
229 }
230 return VINF_SUCCESS;
231}
232
233
234/**
235 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
236 * the destination.
237 *
238 * @param pImpl Pointer to the instruction implementation (assembly).
239 */
240FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
241{
242 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
243 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
244
245 /*
246 * If rm is denoting a register, no more instruction bytes.
247 */
248 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
249 {
250 IEM_MC_BEGIN(3, 0);
251 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
252 IEM_MC_ARG(uint8_t, u8Src, 1);
253 IEM_MC_ARG(uint32_t *, pEFlags, 2);
254
255 IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
256 IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
257 IEM_MC_REF_EFLAGS(pEFlags);
258 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
259
260 IEM_MC_ADVANCE_RIP();
261 IEM_MC_END();
262 }
263 else
264 {
265 /*
266 * We're accessing memory.
267 */
268 IEM_MC_BEGIN(3, 1);
269 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
270 IEM_MC_ARG(uint8_t, u8Src, 1);
271 IEM_MC_ARG(uint32_t *, pEFlags, 2);
272 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
273
274 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
275 IEM_MC_FETCH_MEM_U8(u8Src, pIemCpu->iEffSeg, GCPtrEffDst);
276 IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
277 IEM_MC_REF_EFLAGS(pEFlags);
278 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
279
280 IEM_MC_ADVANCE_RIP();
281 IEM_MC_END();
282 }
283 return VINF_SUCCESS;
284}
285
286
287/**
288 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
289 * register as the destination.
290 *
291 * @param pImpl Pointer to the instruction implementation (assembly).
292 */
293FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
294{
295 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
296 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
297
298 /*
299 * If rm is denoting a register, no more instruction bytes.
300 */
301 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
302 {
303 switch (pIemCpu->enmEffOpSize)
304 {
305 case IEMMODE_16BIT:
306 IEM_MC_BEGIN(3, 0);
307 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
308 IEM_MC_ARG(uint16_t, u16Src, 1);
309 IEM_MC_ARG(uint32_t *, pEFlags, 2);
310
311 IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
312 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
313 IEM_MC_REF_EFLAGS(pEFlags);
314 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
315
316 IEM_MC_ADVANCE_RIP();
317 IEM_MC_END();
318 break;
319
320 case IEMMODE_32BIT:
321 IEM_MC_BEGIN(3, 0);
322 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
323 IEM_MC_ARG(uint32_t, u32Src, 1);
324 IEM_MC_ARG(uint32_t *, pEFlags, 2);
325
326 IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
327 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
328 IEM_MC_REF_EFLAGS(pEFlags);
329 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
330
331 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
332 IEM_MC_ADVANCE_RIP();
333 IEM_MC_END();
334 break;
335
336 case IEMMODE_64BIT:
337 IEM_MC_BEGIN(3, 0);
338 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
339 IEM_MC_ARG(uint64_t, u64Src, 1);
340 IEM_MC_ARG(uint32_t *, pEFlags, 2);
341
342 IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
343 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
344 IEM_MC_REF_EFLAGS(pEFlags);
345 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
346
347 IEM_MC_ADVANCE_RIP();
348 IEM_MC_END();
349 break;
350 }
351 }
352 else
353 {
354 /*
355 * We're accessing memory.
356 */
357 switch (pIemCpu->enmEffOpSize)
358 {
359 case IEMMODE_16BIT:
360 IEM_MC_BEGIN(3, 1);
361 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
362 IEM_MC_ARG(uint16_t, u16Src, 1);
363 IEM_MC_ARG(uint32_t *, pEFlags, 2);
364 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
365
366 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
367 IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffDst);
368 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
369 IEM_MC_REF_EFLAGS(pEFlags);
370 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
371
372 IEM_MC_ADVANCE_RIP();
373 IEM_MC_END();
374 break;
375
376 case IEMMODE_32BIT:
377 IEM_MC_BEGIN(3, 1);
378 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
379 IEM_MC_ARG(uint32_t, u32Src, 1);
380 IEM_MC_ARG(uint32_t *, pEFlags, 2);
381 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
382
383 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
384 IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffDst);
385 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
386 IEM_MC_REF_EFLAGS(pEFlags);
387 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
388
389 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
390 IEM_MC_ADVANCE_RIP();
391 IEM_MC_END();
392 break;
393
394 case IEMMODE_64BIT:
395 IEM_MC_BEGIN(3, 1);
396 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
397 IEM_MC_ARG(uint64_t, u64Src, 1);
398 IEM_MC_ARG(uint32_t *, pEFlags, 2);
399 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
400
401 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
402 IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffDst);
403 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
404 IEM_MC_REF_EFLAGS(pEFlags);
405 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
406
407 IEM_MC_ADVANCE_RIP();
408 IEM_MC_END();
409 break;
410 }
411 }
412 return VINF_SUCCESS;
413}
414
415
416/**
417 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
418 * a byte immediate.
419 *
420 * @param pImpl Pointer to the instruction implementation (assembly).
421 */
422FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
423{
424 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
425 IEMOP_HLP_NO_LOCK_PREFIX();
426
427 IEM_MC_BEGIN(3, 0);
428 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
429 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1);
430 IEM_MC_ARG(uint32_t *, pEFlags, 2);
431
432 IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
433 IEM_MC_REF_EFLAGS(pEFlags);
434 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
435
436 IEM_MC_ADVANCE_RIP();
437 IEM_MC_END();
438 return VINF_SUCCESS;
439}
440
441
442/**
443 * Common worker for instructions like ADD, AND, OR, ++ with working on
444 * AX/EAX/RAX with a word/dword immediate.
445 *
446 * @param pImpl Pointer to the instruction implementation (assembly).
447 */
448FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
449{
450 switch (pIemCpu->enmEffOpSize)
451 {
452 case IEMMODE_16BIT:
453 {
454 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
455 IEMOP_HLP_NO_LOCK_PREFIX();
456
457 IEM_MC_BEGIN(3, 0);
458 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
459 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
460 IEM_MC_ARG(uint32_t *, pEFlags, 2);
461
462 IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
463 IEM_MC_REF_EFLAGS(pEFlags);
464 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
465
466 IEM_MC_ADVANCE_RIP();
467 IEM_MC_END();
468 return VINF_SUCCESS;
469 }
470
471 case IEMMODE_32BIT:
472 {
473 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
474 IEMOP_HLP_NO_LOCK_PREFIX();
475
476 IEM_MC_BEGIN(3, 0);
477 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
478 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
479 IEM_MC_ARG(uint32_t *, pEFlags, 2);
480
481 IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
482 IEM_MC_REF_EFLAGS(pEFlags);
483 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
484
485 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
486 IEM_MC_ADVANCE_RIP();
487 IEM_MC_END();
488 return VINF_SUCCESS;
489 }
490
491 case IEMMODE_64BIT:
492 {
493 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
494 IEMOP_HLP_NO_LOCK_PREFIX();
495
496 IEM_MC_BEGIN(3, 0);
497 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
498 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
499 IEM_MC_ARG(uint32_t *, pEFlags, 2);
500
501 IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
502 IEM_MC_REF_EFLAGS(pEFlags);
503 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
504
505 IEM_MC_ADVANCE_RIP();
506 IEM_MC_END();
507 return VINF_SUCCESS;
508 }
509
510 IEM_NOT_REACHED_DEFAULT_CASE_RET();
511 }
512}
513
514
/** Opcodes 0xf1, 0xd6 - always raise \#UD. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC("Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
521
522
523
524/** @name ..... opcodes.
525 *
526 * @{
527 */
528
529/** @} */
530
531
532/** @name Two byte opcodes (first byte 0x0f).
533 *
534 * @{
535 */
536
537/** Opcode 0x0f 0x00 /0. */
538FNIEMOP_DEF_1(iemOp_Grp6_sldt, uint8_t, bRm)
539{
540 IEMOP_MNEMONIC("sldt Rv/Mw");
541 IEMOP_HLP_NO_REAL_OR_V86_MODE();
542
543 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
544 {
545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
546 switch (pIemCpu->enmEffOpSize)
547 {
548 case IEMMODE_16BIT:
549 IEM_MC_BEGIN(0, 1);
550 IEM_MC_LOCAL(uint16_t, u16Ldtr);
551 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
552 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Ldtr);
553 IEM_MC_ADVANCE_RIP();
554 IEM_MC_END();
555 break;
556
557 case IEMMODE_32BIT:
558 IEM_MC_BEGIN(0, 1);
559 IEM_MC_LOCAL(uint32_t, u32Ldtr);
560 IEM_MC_FETCH_LDTR_U32(u32Ldtr);
561 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Ldtr);
562 IEM_MC_ADVANCE_RIP();
563 IEM_MC_END();
564 break;
565
566 case IEMMODE_64BIT:
567 IEM_MC_BEGIN(0, 1);
568 IEM_MC_LOCAL(uint64_t, u64Ldtr);
569 IEM_MC_FETCH_LDTR_U64(u64Ldtr);
570 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Ldtr);
571 IEM_MC_ADVANCE_RIP();
572 IEM_MC_END();
573 break;
574
575 IEM_NOT_REACHED_DEFAULT_CASE_RET();
576 }
577 }
578 else
579 {
580 IEM_MC_BEGIN(0, 2);
581 IEM_MC_LOCAL(uint16_t, u16Ldtr);
582 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
583 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
585 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
586 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Ldtr);
587 IEM_MC_ADVANCE_RIP();
588 IEM_MC_END();
589 }
590 return VINF_SUCCESS;
591}
592
593
594/** Opcode 0x0f 0x00 /1. */
595FNIEMOP_DEF_1(iemOp_Grp6_str, uint8_t, bRm)
596{
597 IEMOP_MNEMONIC("str Rv/Mw");
598 IEMOP_HLP_NO_REAL_OR_V86_MODE();
599
600 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
601 {
602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
603 switch (pIemCpu->enmEffOpSize)
604 {
605 case IEMMODE_16BIT:
606 IEM_MC_BEGIN(0, 1);
607 IEM_MC_LOCAL(uint16_t, u16Tr);
608 IEM_MC_FETCH_TR_U16(u16Tr);
609 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tr);
610 IEM_MC_ADVANCE_RIP();
611 IEM_MC_END();
612 break;
613
614 case IEMMODE_32BIT:
615 IEM_MC_BEGIN(0, 1);
616 IEM_MC_LOCAL(uint32_t, u32Tr);
617 IEM_MC_FETCH_TR_U32(u32Tr);
618 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tr);
619 IEM_MC_ADVANCE_RIP();
620 IEM_MC_END();
621 break;
622
623 case IEMMODE_64BIT:
624 IEM_MC_BEGIN(0, 1);
625 IEM_MC_LOCAL(uint64_t, u64Tr);
626 IEM_MC_FETCH_TR_U64(u64Tr);
627 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tr);
628 IEM_MC_ADVANCE_RIP();
629 IEM_MC_END();
630 break;
631
632 IEM_NOT_REACHED_DEFAULT_CASE_RET();
633 }
634 }
635 else
636 {
637 IEM_MC_BEGIN(0, 2);
638 IEM_MC_LOCAL(uint16_t, u16Tr);
639 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
640 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
642 IEM_MC_FETCH_TR_U16(u16Tr);
643 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tr);
644 IEM_MC_ADVANCE_RIP();
645 IEM_MC_END();
646 }
647 return VINF_SUCCESS;
648}
649
650
651/** Opcode 0x0f 0x00 /2. */
652FNIEMOP_DEF_1(iemOp_Grp6_lldt, uint8_t, bRm)
653{
654 IEMOP_MNEMONIC("lldt Ew");
655 IEMOP_HLP_NO_REAL_OR_V86_MODE();
656
657 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
658 {
659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
660 IEM_MC_BEGIN(1, 0);
661 IEM_MC_ARG(uint16_t, u16Sel, 0);
662 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
663 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
664 IEM_MC_END();
665 }
666 else
667 {
668 IEM_MC_BEGIN(1, 1);
669 IEM_MC_ARG(uint16_t, u16Sel, 0);
670 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
671 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
672 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
674 IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
675 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
676 IEM_MC_END();
677 }
678 return VINF_SUCCESS;
679}
680
681
682/** Opcode 0x0f 0x00 /3. */
683FNIEMOP_DEF_1(iemOp_Grp6_ltr, uint8_t, bRm)
684{
685 IEMOP_MNEMONIC("ltr Ew");
686 IEMOP_HLP_NO_REAL_OR_V86_MODE();
687
688 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
689 {
690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
691 IEM_MC_BEGIN(1, 0);
692 IEM_MC_ARG(uint16_t, u16Sel, 0);
693 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
694 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
695 IEM_MC_END();
696 }
697 else
698 {
699 IEM_MC_BEGIN(1, 1);
700 IEM_MC_ARG(uint16_t, u16Sel, 0);
701 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
702 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
703 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
705 IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
706 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
707 IEM_MC_END();
708 }
709 return VINF_SUCCESS;
710}
711
712
713/** Opcode 0x0f 0x00 /4. */
714FNIEMOP_STUB_1(iemOp_Grp6_verr, uint8_t, bRm);
715
716
717/** Opcode 0x0f 0x00 /5. */
718FNIEMOP_STUB_1(iemOp_Grp6_verw, uint8_t, bRm);
719
720
721/** Opcode 0x0f 0x00. */
722FNIEMOP_DEF(iemOp_Grp6)
723{
724 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
725 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
726 {
727 case 0: return FNIEMOP_CALL_1(iemOp_Grp6_sldt, bRm);
728 case 1: return FNIEMOP_CALL_1(iemOp_Grp6_str, bRm);
729 case 2: return FNIEMOP_CALL_1(iemOp_Grp6_lldt, bRm);
730 case 3: return FNIEMOP_CALL_1(iemOp_Grp6_ltr, bRm);
731 case 4: return FNIEMOP_CALL_1(iemOp_Grp6_verr, bRm);
732 case 5: return FNIEMOP_CALL_1(iemOp_Grp6_verw, bRm);
733 case 6: return IEMOP_RAISE_INVALID_OPCODE();
734 case 7: return IEMOP_RAISE_INVALID_OPCODE();
735 IEM_NOT_REACHED_DEFAULT_CASE_RET();
736 }
737
738}
739
740
741/** Opcode 0x0f 0x01 /0. */
742FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
743{
744 IEMOP_MNEMONIC("sgdt Ms");
745 IEMOP_HLP_64BIT_OP_SIZE();
746 IEM_MC_BEGIN(3, 1);
747 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/pIemCpu->iEffSeg, 0);
748 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
749 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
750 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
751 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
752 IEM_MC_CALL_CIMPL_3(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
753 IEM_MC_END();
754 return VINF_SUCCESS;
755}
756
757
/** Opcode 0x0f 0x01 /0 (mod=3, rm=1) - VMCALL; unimplemented, raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
764
765
/** Opcode 0x0f 0x01 /0 (mod=3, rm=2) - VMLAUNCH; unimplemented, raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
772
773
/** Opcode 0x0f 0x01 /0 (mod=3, rm=3) - VMRESUME; unimplemented, raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
780
781
/** Opcode 0x0f 0x01 /0 (mod=3, rm=4) - VMXOFF; unimplemented, raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
788
789
790/** Opcode 0x0f 0x01 /1. */
791FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
792{
793 IEMOP_MNEMONIC("sidt Ms");
794 IEMOP_HLP_64BIT_OP_SIZE();
795 IEM_MC_BEGIN(3, 1);
796 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/pIemCpu->iEffSeg, 0);
797 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
798 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
799 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
801 IEM_MC_CALL_CIMPL_3(iemCImpl_sidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
802 IEM_MC_END();
803 return VINF_SUCCESS;
804}
805
806
807/** Opcode 0x0f 0x01 /1. */
808FNIEMOP_DEF(iemOp_Grp7_monitor)
809{
810 NOREF(pIemCpu);
811 IEMOP_BITCH_ABOUT_STUB();
812 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
813}
814
815
816/** Opcode 0x0f 0x01 /1. */
817FNIEMOP_DEF(iemOp_Grp7_mwait)
818{
819 NOREF(pIemCpu);
820 IEMOP_BITCH_ABOUT_STUB();
821 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
822}
823
824
825/** Opcode 0x0f 0x01 /2. */
826FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
827{
828 IEMOP_HLP_NO_LOCK_PREFIX();
829
830 IEMOP_HLP_64BIT_OP_SIZE();
831 IEM_MC_BEGIN(3, 1);
832 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/pIemCpu->iEffSeg, 0);
833 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
834 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
835 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
836 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
837 IEM_MC_END();
838 return VINF_SUCCESS;
839}
840
841
/** Opcode 0x0f 0x01 /2 (mod=3, rm=0) - XGETBV; stub, asserts then raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    AssertFailed();
    return IEMOP_RAISE_INVALID_OPCODE();
}
848
849
/** Opcode 0x0f 0x01 /2 (mod=3, rm=1) - XSETBV; stub, asserts then raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    AssertFailed();
    return IEMOP_RAISE_INVALID_OPCODE();
}
856
857
858/** Opcode 0x0f 0x01 /3. */
859FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
860{
861 IEMOP_HLP_NO_LOCK_PREFIX();
862
863 IEMMODE enmEffOpSize = pIemCpu->enmCpuMode == IEMMODE_64BIT
864 ? IEMMODE_64BIT
865 : pIemCpu->enmEffOpSize;
866 IEM_MC_BEGIN(3, 1);
867 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/pIemCpu->iEffSeg, 0);
868 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
869 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
870 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
871 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
872 IEM_MC_END();
873 return VINF_SUCCESS;
874}
875
876
877/** Opcode 0x0f 0x01 0xd8. */
878FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
879
880/** Opcode 0x0f 0x01 0xd9. */
881FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);
882
883/** Opcode 0x0f 0x01 0xda. */
884FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
885
886/** Opcode 0x0f 0x01 0xdb. */
887FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
888
889/** Opcode 0x0f 0x01 0xdc. */
890FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
891
892/** Opcode 0x0f 0x01 0xdd. */
893FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
894
895/** Opcode 0x0f 0x01 0xde. */
896FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
897
898/** Opcode 0x0f 0x01 0xdf. */
899FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
900
901/** Opcode 0x0f 0x01 /4. */
902FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
903{
904 IEMOP_HLP_NO_LOCK_PREFIX();
905 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
906 {
907 switch (pIemCpu->enmEffOpSize)
908 {
909 case IEMMODE_16BIT:
910 IEM_MC_BEGIN(0, 1);
911 IEM_MC_LOCAL(uint16_t, u16Tmp);
912 IEM_MC_FETCH_CR0_U16(u16Tmp);
913 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tmp);
914 IEM_MC_ADVANCE_RIP();
915 IEM_MC_END();
916 return VINF_SUCCESS;
917
918 case IEMMODE_32BIT:
919 IEM_MC_BEGIN(0, 1);
920 IEM_MC_LOCAL(uint32_t, u32Tmp);
921 IEM_MC_FETCH_CR0_U32(u32Tmp);
922 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
923 IEM_MC_ADVANCE_RIP();
924 IEM_MC_END();
925 return VINF_SUCCESS;
926
927 case IEMMODE_64BIT:
928 IEM_MC_BEGIN(0, 1);
929 IEM_MC_LOCAL(uint64_t, u64Tmp);
930 IEM_MC_FETCH_CR0_U64(u64Tmp);
931 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
932 IEM_MC_ADVANCE_RIP();
933 IEM_MC_END();
934 return VINF_SUCCESS;
935
936 IEM_NOT_REACHED_DEFAULT_CASE_RET();
937 }
938 }
939 else
940 {
941 /* Ignore operand size here, memory refs are always 16-bit. */
942 IEM_MC_BEGIN(0, 2);
943 IEM_MC_LOCAL(uint16_t, u16Tmp);
944 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
945 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
946 IEM_MC_FETCH_CR0_U16(u16Tmp);
947 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
948 IEM_MC_ADVANCE_RIP();
949 IEM_MC_END();
950 return VINF_SUCCESS;
951 }
952}
953
954
955/** Opcode 0x0f 0x01 /6. */
956FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
957{
958 /* The operand size is effectively ignored, all is 16-bit and only the
959 lower 3-bits are used. */
960 IEMOP_HLP_NO_LOCK_PREFIX();
961 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
962 {
963 IEM_MC_BEGIN(1, 0);
964 IEM_MC_ARG(uint16_t, u16Tmp, 0);
965 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
966 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
967 IEM_MC_END();
968 }
969 else
970 {
971 IEM_MC_BEGIN(1, 1);
972 IEM_MC_ARG(uint16_t, u16Tmp, 0);
973 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
974 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
975 IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
976 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
977 IEM_MC_END();
978 }
979 return VINF_SUCCESS;
980}
981
982
983/** Opcode 0x0f 0x01 /7. */
984FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
985{
986 IEMOP_HLP_NO_LOCK_PREFIX();
987 IEM_MC_BEGIN(1, 1);
988 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
989 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
990 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
991 IEM_MC_END();
992 return VINF_SUCCESS;
993}
994
995
996/** Opcode 0x0f 0x01 /7. */
997FNIEMOP_DEF(iemOp_Grp7_swapgs)
998{
999 NOREF(pIemCpu);
1000 IEMOP_BITCH_ABOUT_STUB();
1001 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
1002}
1003
1004
1005/** Opcode 0x0f 0x01 /7. */
1006FNIEMOP_DEF(iemOp_Grp7_rdtscp)
1007{
1008 NOREF(pIemCpu);
1009 IEMOP_BITCH_ABOUT_STUB();
1010 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
1011}
1012
1013
/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    /*
     * Group 7 dispatcher: selects on ModRM.reg; for /0, /1, /3 and /7 the
     * register (mod == 3) encodings select a second level on ModRM.rm.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: /* memory: SGDT; register: VMX instructions. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1: /* memory: SIDT; register: MONITOR/MWAIT. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2: /* memory: LGDT; register: XGETBV/XSETBV. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3: /* memory: LIDT; register: AMD SVM instructions. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            /* Not reached: the switch above covers all eight rm values. */

        case 4: /* SMSW - both memory and register forms handled by the worker. */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5: /* /5 is undefined in group 7. */
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6: /* LMSW - both memory and register forms handled by the worker. */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* memory: INVLPG; register: SWAPGS (rm=0) / RDTSCP (rm=1). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1090
1091
/** Opcode 0x0f 0x02. */
FNIEMOP_STUB(iemOp_lar_Gv_Ew);
/** Opcode 0x0f 0x03. */
FNIEMOP_STUB(iemOp_lsl_Gv_Ew);
/** Opcode 0x0f 0x05. */
FNIEMOP_STUB(iemOp_syscall);
1098
1099
/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    /* Clear CR0.TS - all the work is done in iemCImpl_clts. */
    IEMOP_MNEMONIC("clts");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1107
1108
/** Opcode 0x0f 0x07. */
FNIEMOP_STUB(iemOp_sysret);
/** Opcode 0x0f 0x08. */
FNIEMOP_STUB(iemOp_invd);
1113
1114
/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    /* Privilege-checked no-op: raise #GP(0) outside ring-0, otherwise just advance RIP. */
    IEMOP_MNEMONIC("wbinvd");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}
1126
1127
/** Opcode 0x0f 0x0b.
 * UD2 - the guaranteed-invalid opcode; still stubbed. */
FNIEMOP_STUB(iemOp_ud2);
1130
1131/** Opcode 0x0f 0x0d. */
1132FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
1133{
1134 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
1135 if (!IEM_IS_AMD_CPUID_FEATURES_ANY_PRESENT(X86_CPUID_EXT_FEATURE_EDX_LONG_MODE | X86_CPUID_AMD_FEATURE_EDX_3DNOW,
1136 X86_CPUID_AMD_FEATURE_ECX_3DNOWPRF))
1137 {
1138 IEMOP_MNEMONIC("GrpP");
1139 return IEMOP_RAISE_INVALID_OPCODE();
1140 }
1141
1142 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1143 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1144 {
1145 IEMOP_MNEMONIC("GrpP");
1146 return IEMOP_RAISE_INVALID_OPCODE();
1147 }
1148
1149 IEMOP_HLP_NO_LOCK_PREFIX();
1150 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1151 {
1152 case 2: /* Aliased to /0 for the time being. */
1153 case 4: /* Aliased to /0 for the time being. */
1154 case 5: /* Aliased to /0 for the time being. */
1155 case 6: /* Aliased to /0 for the time being. */
1156 case 7: /* Aliased to /0 for the time being. */
1157 case 0: IEMOP_MNEMONIC("prefetch"); break;
1158 case 1: IEMOP_MNEMONIC("prefetchw "); break;
1159 case 3: IEMOP_MNEMONIC("prefetchw"); break;
1160 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1161 }
1162
1163 IEM_MC_BEGIN(0, 1);
1164 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1165 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1166 /* Currently a NOP. */
1167 IEM_MC_ADVANCE_RIP();
1168 IEM_MC_END();
1169 return VINF_SUCCESS;
1170}
1171
1172
/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/*
 * 3DNow! workers (selected by the trailing opcode byte of 0x0f 0x0f) -- all
 * still stubs; dispatched from iemOp_3Dnow below.
 */

/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1248
1249
/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    /* #UD unless the guest CPU profile advertises AMD 3DNow!. */
    if (!IEM_IS_AMD_CPUID_FEATURE_PRESENT_EDX(X86_CPUID_AMD_FEATURE_EDX_3DNOW))
    {
        IEMOP_MNEMONIC("3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    /* NOTE(review): the real 3DNow! encoding places this function byte AFTER
       the ModRM/SIB/displacement bytes; here it is fetched first, so the stub
       workers will have to take that into account -- TODO confirm. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1291
1292
/*
 * 0x0f 0x10..0x17: SSE/SSE2 moves -- all stubs.  Each identifier enumerates
 * the prefix-dependent forms (none / 0x66 / 0xf3 / 0xf2) it will cover.
 */
/** Opcode 0x0f 0x10. */
FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x11. */
FNIEMOP_STUB(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq);
/** Opcode 0x0f 0x13. */
FNIEMOP_STUB(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq);
/** Opcode 0x0f 0x14. */
FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
/** Opcode 0x0f 0x15. */
FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
/** Opcode 0x0f 0x16. */
FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq);
/** Opcode 0x0f 0x17. */
FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq);
1309
1310
/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    /* PREFETCHh - memory forms only; implemented as a NOP after address decoding. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC("prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC("prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC("prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC("prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        /* Decode the effective address (consuming SIB/displacement bytes), then do nothing. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /* Register forms raise #UD. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1342
1343
1344/** Opcode 0x0f 0x19..0x1f. */
1345FNIEMOP_DEF(iemOp_nop_Ev)
1346{
1347 IEMOP_HLP_NO_LOCK_PREFIX();
1348 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1349 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1350 {
1351 IEM_MC_BEGIN(0, 0);
1352 IEM_MC_ADVANCE_RIP();
1353 IEM_MC_END();
1354 }
1355 else
1356 {
1357 IEM_MC_BEGIN(0, 1);
1358 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1359 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1360 /* Currently a NOP. */
1361 IEM_MC_ADVANCE_RIP();
1362 IEM_MC_END();
1363 }
1364 return VINF_SUCCESS;
1365}
1366
1367
/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Rd,Cd");
    /* Operand size is forced: 64-bit in long mode, 32-bit otherwise. */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_IS_AMD_CPUID_FEATURE_PRESENT_ECX(X86_CPUID_AMD_FEATURE_ECX_CR8L))
            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 exist; anything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    /* rm (with REX.B) names the destination GPR; the CImpl worker does the move. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB, iCrReg);
}
1398
1399
1400/** Opcode 0x0f 0x21. */
1401FNIEMOP_DEF(iemOp_mov_Rd_Dd)
1402{
1403 IEMOP_MNEMONIC("mov Rd,Dd");
1404 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1405 IEMOP_HLP_NO_LOCK_PREFIX();
1406 if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
1407 return IEMOP_RAISE_INVALID_OPCODE();
1408 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
1409 (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB,
1410 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
1411}
1412
1413
/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Cd,Rd");
    /* Operand size is forced: 64-bit in long mode, 32-bit otherwise. */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_IS_AMD_CPUID_FEATURE_PRESENT_ECX(X86_CPUID_AMD_FEATURE_ECX_CR8L))
            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 exist; anything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    /* rm (with REX.B) names the source GPR; the CImpl worker does the move. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1444
1445
/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    /* MOV DRx,r: ModRM.reg selects the debug register (REX.R => #UD),
       ModRM.rm + REX.B the source GPR; work done in iemCImpl_mov_Dd_Rd. */
    IEMOP_MNEMONIC("mov Dd,Rd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1458
1459
/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    /* Moves from the (long gone) test registers always raise #UD here. */
    IEMOP_MNEMONIC("mov Rd,Td");
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    /* Moves to the (long gone) test registers always raise #UD here. */
    IEMOP_MNEMONIC("mov Td,Rd");
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1476
1477
/*
 * 0x0f 0x28..0x2f: SSE/SSE2 aligned moves, conversions and compares -- stubs.
 */
/** Opcode 0x0f 0x28. */
FNIEMOP_STUB(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd);
/** Opcode 0x0f 0x29. */
FNIEMOP_STUB(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd);
/** Opcode 0x0f 0x2a. */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey);
/** Opcode 0x0f 0x2b. */
FNIEMOP_STUB(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd);
/** Opcode 0x0f 0x2c. */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd);
/** Opcode 0x0f 0x2d. */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
/** Opcode 0x0f 0x2e. */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd);
/** Opcode 0x0f 0x2f. */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
1494
1495
/** Opcode 0x0f 0x30. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    /* All the work (privilege check, MSR dispatch) is in iemCImpl_wrmsr. */
    IEMOP_MNEMONIC("wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}


/** Opcode 0x0f 0x31. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    /* All the work is in iemCImpl_rdtsc. */
    IEMOP_MNEMONIC("rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}


/** Opcode 0x0f 0x32. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    /* All the work (privilege check, MSR dispatch) is in iemCImpl_rdmsr. */
    IEMOP_MNEMONIC("rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
1521
1522
/** Opcode 0x0f 0x33. */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
/** Opcode 0x0f 0x3a. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
/** Opcode 0x0f 0x3c (?). */
FNIEMOP_STUB(iemOp_movnti_Gv_Ev);
1537
1538/**
1539 * Implements a conditional move.
1540 *
1541 * Wish there was an obvious way to do this where we could share and reduce
1542 * code bloat.
1543 *
1544 * @param a_Cnd The conditional "microcode" operation.
1545 */
1546#define CMOV_X(a_Cnd) \
1547 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
1548 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
1549 { \
1550 switch (pIemCpu->enmEffOpSize) \
1551 { \
1552 case IEMMODE_16BIT: \
1553 IEM_MC_BEGIN(0, 1); \
1554 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1555 a_Cnd { \
1556 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
1557 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
1558 } IEM_MC_ENDIF(); \
1559 IEM_MC_ADVANCE_RIP(); \
1560 IEM_MC_END(); \
1561 return VINF_SUCCESS; \
1562 \
1563 case IEMMODE_32BIT: \
1564 IEM_MC_BEGIN(0, 1); \
1565 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1566 a_Cnd { \
1567 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
1568 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
1569 } IEM_MC_ELSE() { \
1570 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
1571 } IEM_MC_ENDIF(); \
1572 IEM_MC_ADVANCE_RIP(); \
1573 IEM_MC_END(); \
1574 return VINF_SUCCESS; \
1575 \
1576 case IEMMODE_64BIT: \
1577 IEM_MC_BEGIN(0, 1); \
1578 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1579 a_Cnd { \
1580 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
1581 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
1582 } IEM_MC_ENDIF(); \
1583 IEM_MC_ADVANCE_RIP(); \
1584 IEM_MC_END(); \
1585 return VINF_SUCCESS; \
1586 \
1587 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1588 } \
1589 } \
1590 else \
1591 { \
1592 switch (pIemCpu->enmEffOpSize) \
1593 { \
1594 case IEMMODE_16BIT: \
1595 IEM_MC_BEGIN(0, 2); \
1596 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1597 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1598 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1599 IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
1600 a_Cnd { \
1601 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
1602 } IEM_MC_ENDIF(); \
1603 IEM_MC_ADVANCE_RIP(); \
1604 IEM_MC_END(); \
1605 return VINF_SUCCESS; \
1606 \
1607 case IEMMODE_32BIT: \
1608 IEM_MC_BEGIN(0, 2); \
1609 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1610 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1612 IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
1613 a_Cnd { \
1614 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
1615 } IEM_MC_ELSE() { \
1616 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
1617 } IEM_MC_ENDIF(); \
1618 IEM_MC_ADVANCE_RIP(); \
1619 IEM_MC_END(); \
1620 return VINF_SUCCESS; \
1621 \
1622 case IEMMODE_64BIT: \
1623 IEM_MC_BEGIN(0, 2); \
1624 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1625 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1626 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1627 IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
1628 a_Cnd { \
1629 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
1630 } IEM_MC_ENDIF(); \
1631 IEM_MC_ADVANCE_RIP(); \
1632 IEM_MC_END(); \
1633 return VINF_SUCCESS; \
1634 \
1635 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1636 } \
1637 } do {} while (0)
1638
1639
1640
/** Opcode 0x0f 0x40.  CMOVO: move if OF=1. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41.  CMOVNO: move if OF=0. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42.  CMOVC/CMOVB: move if CF=1. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43.  CMOVNC/CMOVNB: move if CF=0. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x44.  CMOVE/CMOVZ: move if ZF=1. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC("cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45.  CMOVNE/CMOVNZ: move if ZF=0. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46.  CMOVBE: move if CF=1 or ZF=1. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47.  CMOVNBE/CMOVA: move if CF=0 and ZF=0. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x48.  CMOVS: move if SF=1. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49.  CMOVNS: move if SF=0. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a.  CMOVP: move if PF=1. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b.  CMOVNP: move if PF=0. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4c.  CMOVL: move if SF != OF. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4d.  CMOVNL/CMOVGE: move if SF == OF. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4e.  CMOVLE: move if ZF=1 or SF != OF. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4f.  CMOVNLE/CMOVG: move if ZF=0 and SF == OF. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}

#undef CMOV_X
1769
/*
 * 0x0f 0x50..0x70: SSE/SSE2 arithmetic and MMX pack/unpack/compare -- stubs.
 */
/** Opcode 0x0f 0x50. */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
/** Opcode 0x0f 0x51. */
FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
/** Opcode 0x0f 0x52. */
FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
/** Opcode 0x0f 0x53. */
FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
/** Opcode 0x0f 0x54. */
FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
/** Opcode 0x0f 0x55. */
FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
/** Opcode 0x0f 0x56. */
FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
/** Opcode 0x0f 0x57. */
FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
/** Opcode 0x0f 0x58. */
FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd);
/** Opcode 0x0f 0x59. */
FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);
/** Opcode 0x0f 0x5a. */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
/** Opcode 0x0f 0x5b. */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
/** Opcode 0x0f 0x5c. */
FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
/** Opcode 0x0f 0x5d. */
FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
/** Opcode 0x0f 0x5e. */
FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
/** Opcode 0x0f 0x5f. */
FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
/** Opcode 0x0f 0x60. */
FNIEMOP_STUB(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq);
/** Opcode 0x0f 0x61. */
FNIEMOP_STUB(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq);
/** Opcode 0x0f 0x62. */
FNIEMOP_STUB(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq);
/** Opcode 0x0f 0x63. */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
/** Opcode 0x0f 0x64. */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
/** Opcode 0x0f 0x65. */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
/** Opcode 0x0f 0x66. */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
/** Opcode 0x0f 0x67. */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
/** Opcode 0x0f 0x68. */
FNIEMOP_STUB(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq);
/** Opcode 0x0f 0x69. */
FNIEMOP_STUB(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq);
/** Opcode 0x0f 0x6a. */
FNIEMOP_STUB(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq);
/** Opcode 0x0f 0x6b. */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);
/** Opcode 0x0f 0x6c. */
FNIEMOP_STUB(iemOp_punpcklqdq_Vdq_Wdq);
/** Opcode 0x0f 0x6d. */
FNIEMOP_STUB(iemOp_punpckhqdq_Vdq_Wdq);
/** Opcode 0x0f 0x6e. */
FNIEMOP_STUB(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey);
/** Opcode 0x0f 0x6f. */
FNIEMOP_STUB(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq);
/** Opcode 0x0f 0x70. */
FNIEMOP_STUB(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib);
1836
/*
 * Group 12 workers (word shifts by immediate) -- stubs; Nq = MMX form,
 * Udq = SSE (0x66 prefix) form.
 */
/** Opcode 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
1854
1855
/** Opcode 0x0f 0x71. */
FNIEMOP_DEF(iemOp_Grp12)
{
    /* Group 12: word shifts by immediate; register (mod == 3) forms only.
       The operand-size/rep prefixes pick the MMX (no prefix) or SSE (0x66) worker. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 3: case 5: case 7:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2: /* PSRLW */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 4: /* PSRAW */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6: /* PSLLW */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1890
1891
/*
 * Group 13 workers (doubleword shifts by immediate) -- stubs; Nq = MMX form,
 * Udq = SSE (0x66 prefix) form.
 */
/** Opcode 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
1909
1910
/** Opcode 0x0f 0x72. */
FNIEMOP_DEF(iemOp_Grp13)
{
    /* Group 13: doubleword shifts by immediate; register (mod == 3) forms only.
       The operand-size/rep prefixes pick the MMX (no prefix) or SSE (0x66) worker. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 3: case 5: case 7:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2: /* PSRLD */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 4: /* PSRAD */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6: /* PSLLD */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1945
1946
/*
 * Group 14 workers (quadword/double-quadword shifts by immediate) -- stubs;
 * Nq = MMX form, Udq = SSE (0x66 prefix) form.  PSRLDQ/PSLLDQ are SSE-only.
 */
/** Opcode 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm);
1964
1965
/** Opcode 0x0f 0x73. */
FNIEMOP_DEF(iemOp_Grp14)
{
    /* Group 14: quadword shifts by immediate; register (mod == 3) forms only.
       /3 and /7 (byte shifts of the whole xmm register) exist only with 0x66. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 4: case 5:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2: /* PSRLQ */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 3: /* PSRLDQ - SSE only. */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6: /* PSLLQ */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 7: /* PSLLDQ - SSE only. */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2005
2006
/** Opcode 0x0f 0x74. */
FNIEMOP_STUB(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq);
/** Opcode 0x0f 0x75. */
FNIEMOP_STUB(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq);
/** Opcode 0x0f 0x76.
 * NOTE(review): the identifier says 'pcmped' -- looks like a typo for
 * pcmpeqd; renaming would also require updating the opcode table. */
FNIEMOP_STUB(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq);
/** Opcode 0x0f 0x77. */
FNIEMOP_STUB(iemOp_emms);
/** Opcode 0x0f 0x78. */
FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17);
/** Opcode 0x0f 0x79. */
FNIEMOP_UD_STUB(iemOp_vmwrite);
/** Opcode 0x0f 0x7c. */
FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);
/** Opcode 0x0f 0x7d. */
FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
/** Opcode 0x0f 0x7e. */
FNIEMOP_STUB(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq);
/** Opcode 0x0f 0x7f. */
FNIEMOP_STUB(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq);
2027
2028
/** Opcode 0x0f 0x80 - jo Jv: jump near (rel16/rel32) if OF=1. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC("jo Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2062
2063
/** Opcode 0x0f 0x81 - jno Jv: jump near (rel16/rel32) if OF=0. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC("jno Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement; branch sense inverted vs. JO. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2097
2098
/** Opcode 0x0f 0x82 - jc/jb/jnae Jv: jump near (rel16/rel32) if CF=1. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC("jc/jb/jnae Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2132
2133
/** Opcode 0x0f 0x83 - jnc/jnb/jae Jv: jump near (rel16/rel32) if CF=0. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC("jnc/jnb/jae Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement; branch sense inverted vs. JC. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2167
2168
/** Opcode 0x0f 0x84 - je/jz Jv: jump near (rel16/rel32) if ZF=1. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC("je/jz Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2202
2203
/** Opcode 0x0f 0x85 - jne/jnz Jv: jump near (rel16/rel32) if ZF=0. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC("jne/jnz Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement; branch sense inverted vs. JE. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2237
2238
/** Opcode 0x0f 0x86 - jbe/jna Jv: jump near (rel16/rel32) if CF=1 or ZF=1. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC("jbe/jna Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2272
2273
/** Opcode 0x0f 0x87 - jnbe/ja Jv: jump near (rel16/rel32) if CF=0 and ZF=0. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC("jnbe/ja Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement; branch sense inverted vs. JBE. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2307
2308
/** Opcode 0x0f 0x88 - js Jv: jump near (rel16/rel32) if SF=1. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC("js Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2342
2343
/** Opcode 0x0f 0x89 - jns Jv: jump near (rel16/rel32) if SF=0. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC("jns Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement; branch sense inverted vs. JS. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2377
2378
/** Opcode 0x0f 0x8a - jp/jpe Jv: jump near (rel16/rel32) if PF=1. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC("jp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2412
2413
2414/** Opcode 0x0f 0x8b. */
2415FNIEMOP_DEF(iemOp_jnp_Jv)
2416{
2417 IEMOP_MNEMONIC("jo Jv");
2418 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2419 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
2420 {
2421 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
2422 IEMOP_HLP_NO_LOCK_PREFIX();
2423
2424 IEM_MC_BEGIN(0, 0);
2425 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
2426 IEM_MC_ADVANCE_RIP();
2427 } IEM_MC_ELSE() {
2428 IEM_MC_REL_JMP_S16(i16Imm);
2429 } IEM_MC_ENDIF();
2430 IEM_MC_END();
2431 }
2432 else
2433 {
2434 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
2435 IEMOP_HLP_NO_LOCK_PREFIX();
2436
2437 IEM_MC_BEGIN(0, 0);
2438 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
2439 IEM_MC_ADVANCE_RIP();
2440 } IEM_MC_ELSE() {
2441 IEM_MC_REL_JMP_S32(i32Imm);
2442 } IEM_MC_ENDIF();
2443 IEM_MC_END();
2444 }
2445 return VINF_SUCCESS;
2446}
2447
2448
/** Opcode 0x0f 0x8c - jl/jnge Jv: jump near (rel16/rel32) if SF != OF. */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC("jl/jnge Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2482
2483
/** Opcode 0x0f 0x8d - jnl/jge Jv: jump near (rel16/rel32) if SF == OF. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC("jnl/jge Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement; branch sense inverted vs. JL. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2517
2518
/** Opcode 0x0f 0x8e - jle/jng Jv: jump near (rel16/rel32) if ZF=1 or SF != OF. */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC("jle/jng Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2552
2553
/** Opcode 0x0f 0x8f - jnle/jg Jv: jump near (rel16/rel32) if ZF=0 and SF == OF. */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC("jnle/jg Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement; branch sense inverted vs. JLE. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2587
2588
/** Opcode 0x0f 0x90 - seto Eb: store 1 in r/m8 if OF=1, else 0. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC("seto Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2627
2628
/** Opcode 0x0f 0x91 - setno Eb: store 1 in r/m8 if OF=0, else 0. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC("setno Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2667
2668
/** Opcode 0x0f 0x92 - setc/setb/setnae Eb: store 1 in r/m8 if CF=1, else 0. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC("setc Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2707
2708
/** Opcode 0x0f 0x93 - setnc/setnb/setae Eb: store 1 in r/m8 if CF=0, else 0. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC("setnc Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2747
2748
/** Opcode 0x0f 0x94 - sete/setz Eb: store 1 in r/m8 if ZF=1, else 0. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC("sete Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2787
2788
/** Opcode 0x0f 0x95 - setne/setnz Eb: store 1 in r/m8 if ZF=0, else 0. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC("setne Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2827
2828
/** Opcode 0x0f 0x96 - setbe/setna Eb: store 1 in r/m8 if CF=1 or ZF=1, else 0. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC("setbe Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2867
2868
/** Opcode 0x0f 0x97 - setnbe/seta Eb: store 1 in r/m8 if CF=0 and ZF=0, else 0. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC("setnbe Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2907
2908
/** Opcode 0x0f 0x98 - sets Eb: store 1 in r/m8 if SF=1, else 0. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC("sets Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2947
2948
/** Opcode 0x0f 0x99 - setns Eb: store 1 in r/m8 if SF=0, else 0. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC("setns Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2987
2988
2989/** Opcode 0x0f 0x9a. */
2990FNIEMOP_DEF(iemOp_setp_Eb)
2991{
2992 IEMOP_MNEMONIC("setnp Eb");
2993 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2994 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
2995
2996 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
2997 * any way. AMD says it's "unused", whatever that means. We're
2998 * ignoring for now. */
2999 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3000 {
3001 /* register target */
3002 IEM_MC_BEGIN(0, 0);
3003 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3004 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
3005 } IEM_MC_ELSE() {
3006 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
3007 } IEM_MC_ENDIF();
3008 IEM_MC_ADVANCE_RIP();
3009 IEM_MC_END();
3010 }
3011 else
3012 {
3013 /* memory target */
3014 IEM_MC_BEGIN(0, 1);
3015 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3016 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3017 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3018 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
3019 } IEM_MC_ELSE() {
3020 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
3021 } IEM_MC_ENDIF();
3022 IEM_MC_ADVANCE_RIP();
3023 IEM_MC_END();
3024 }
3025 return VINF_SUCCESS;
3026}
3027
3028
/** Opcode 0x0f 0x9b - setnp/setpo Eb: store 1 in r/m8 if PF=0, else 0. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC("setnp Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3067
3068
/** Opcode 0x0f 0x9c - setl/setnge Eb: store 1 in r/m8 if SF != OF, else 0. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC("setl Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3107
3108
/** Opcode 0x0f 0x9d - setnl/setge Eb: store 1 in r/m8 if SF == OF, else 0. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC("setnl Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3147
3148
/** Opcode 0x0f 0x9e. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    /* SETLE/SETNG Eb: byte destination := 1 when ZF == 1 or SF != OF, else 0. */
    IEMOP_MNEMONIC("setle Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3187
3188
/** Opcode 0x0f 0x9f. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    /* SETNLE/SETG Eb: byte destination := 1 when ZF == 0 and SF == OF, else 0.
       Same test as SETLE but with the stored constants swapped. */
    IEMOP_MNEMONIC("setnle Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3227
3228
/**
 * Common 'push segment-register' helper.
 *
 * Pushes the selector value of the given segment register onto the stack,
 * using the current effective operand size (the selector is zero-extended
 * for the 32-bit and 64-bit pushes).
 *
 * @param   iReg    The segment register to push (X86_SREG_XXX).
 */
FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (iReg < X86_SREG_FS)
        IEMOP_HLP_NO_64BIT();   /* push ES/CS/SS/DS is not valid in 64-bit mode; only FS/GS are. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
3271
3272
/** Opcode 0x0f 0xa0 - push fs. */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC("push fs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Defer to the common segment-register push worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
3280
3281
/** Opcode 0x0f 0xa1 - pop fs. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC("pop fs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Popping a segment register can fault/reload descriptors, so it is done
       in a C implementation rather than inline micro-ops. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pIemCpu->enmEffOpSize);
}
3289
3290
/** Opcode 0x0f 0xa2 - cpuid. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC("cpuid");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* All the work happens in the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
3298
3299
3300/**
3301 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
3302 * iemOp_bts_Ev_Gv.
3303 */
3304FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
3305{
3306 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3307 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3308
3309 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3310 {
3311 /* register destination. */
3312 IEMOP_HLP_NO_LOCK_PREFIX();
3313 switch (pIemCpu->enmEffOpSize)
3314 {
3315 case IEMMODE_16BIT:
3316 IEM_MC_BEGIN(3, 0);
3317 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3318 IEM_MC_ARG(uint16_t, u16Src, 1);
3319 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3320
3321 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3322 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
3323 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
3324 IEM_MC_REF_EFLAGS(pEFlags);
3325 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3326
3327 IEM_MC_ADVANCE_RIP();
3328 IEM_MC_END();
3329 return VINF_SUCCESS;
3330
3331 case IEMMODE_32BIT:
3332 IEM_MC_BEGIN(3, 0);
3333 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3334 IEM_MC_ARG(uint32_t, u32Src, 1);
3335 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3336
3337 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3338 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
3339 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
3340 IEM_MC_REF_EFLAGS(pEFlags);
3341 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3342
3343 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
3344 IEM_MC_ADVANCE_RIP();
3345 IEM_MC_END();
3346 return VINF_SUCCESS;
3347
3348 case IEMMODE_64BIT:
3349 IEM_MC_BEGIN(3, 0);
3350 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3351 IEM_MC_ARG(uint64_t, u64Src, 1);
3352 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3353
3354 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3355 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
3356 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
3357 IEM_MC_REF_EFLAGS(pEFlags);
3358 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3359
3360 IEM_MC_ADVANCE_RIP();
3361 IEM_MC_END();
3362 return VINF_SUCCESS;
3363
3364 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3365 }
3366 }
3367 else
3368 {
3369 /* memory destination. */
3370
3371 uint32_t fAccess;
3372 if (pImpl->pfnLockedU16)
3373 fAccess = IEM_ACCESS_DATA_RW;
3374 else /* BT */
3375 {
3376 IEMOP_HLP_NO_LOCK_PREFIX();
3377 fAccess = IEM_ACCESS_DATA_R;
3378 }
3379
3380 /** @todo test negative bit offsets! */
3381 switch (pIemCpu->enmEffOpSize)
3382 {
3383 case IEMMODE_16BIT:
3384 IEM_MC_BEGIN(3, 2);
3385 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3386 IEM_MC_ARG(uint16_t, u16Src, 1);
3387 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3388 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3389 IEM_MC_LOCAL(int16_t, i16AddrAdj);
3390
3391 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3392 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3393 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
3394 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
3395 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
3396 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 1);
3397 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
3398 IEM_MC_FETCH_EFLAGS(EFlags);
3399
3400 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
3401 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
3402 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3403 else
3404 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
3405 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
3406
3407 IEM_MC_COMMIT_EFLAGS(EFlags);
3408 IEM_MC_ADVANCE_RIP();
3409 IEM_MC_END();
3410 return VINF_SUCCESS;
3411
3412 case IEMMODE_32BIT:
3413 IEM_MC_BEGIN(3, 2);
3414 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3415 IEM_MC_ARG(uint32_t, u32Src, 1);
3416 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3417 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3418 IEM_MC_LOCAL(int32_t, i32AddrAdj);
3419
3420 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3421 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3422 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
3423 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
3424 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
3425 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
3426 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
3427 IEM_MC_FETCH_EFLAGS(EFlags);
3428
3429 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
3430 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
3431 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3432 else
3433 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
3434 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
3435
3436 IEM_MC_COMMIT_EFLAGS(EFlags);
3437 IEM_MC_ADVANCE_RIP();
3438 IEM_MC_END();
3439 return VINF_SUCCESS;
3440
3441 case IEMMODE_64BIT:
3442 IEM_MC_BEGIN(3, 2);
3443 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3444 IEM_MC_ARG(uint64_t, u64Src, 1);
3445 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3447 IEM_MC_LOCAL(int64_t, i64AddrAdj);
3448
3449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3450 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3451 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
3452 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
3453 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
3454 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
3455 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
3456 IEM_MC_FETCH_EFLAGS(EFlags);
3457
3458 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
3459 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
3460 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3461 else
3462 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
3463 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
3464
3465 IEM_MC_COMMIT_EFLAGS(EFlags);
3466 IEM_MC_ADVANCE_RIP();
3467 IEM_MC_END();
3468 return VINF_SUCCESS;
3469
3470 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3471 }
3472 }
3473}
3474
3475
3476/** Opcode 0x0f 0xa3. */
3477FNIEMOP_DEF(iemOp_bt_Ev_Gv)
3478{
3479 IEMOP_MNEMONIC("bt Gv,Gv");
3480 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
3481}
3482
3483
/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 *
 * Double-precision shift with an immediate byte shift count: Ev receives
 * bits shifted in from Gv.  AF and OF are architecturally undefined here.
 *
 * @param   pImpl   The shld/shrd implementation table.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination: the immediate follows the ModR/M byte. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The 1 tells the address calculation that one immediate byte
                   follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3628
3629
/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 *
 * Double-precision shift with the shift count taken from CL: Ev receives
 * bits shifted in from Gv.  AF and OF are architecturally undefined here.
 *
 * @param   pImpl   The shld/shrd implementation table.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination */
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3773
3774
3775
/** Opcode 0x0f 0xa4 - shld Ev,Gv,Ib. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shld Ev,Gv,Ib");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}
3782
3783
/** Opcode 0x0f 0xa7 - shld Ev,Gv,CL.  (The opcode comment said 0xa7; the
 *  shld-by-CL encoding is 0x0f 0xa5 in the manuals - worth double checking
 *  against the dispatch table.) */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shld Ev,Gv,CL");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}
3790
3791
/** Opcode 0x0f 0xa8 - push gs. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC("push gs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Defer to the common segment-register push worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
3799
3800
/** Opcode 0x0f 0xa9 - pop gs. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC("pop gs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Segment register pops go through the C implementation (descriptor
       loading, faulting). */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pIemCpu->enmEffOpSize);
}
3808
3809
/** Opcode 0x0f 0xaa - rsm (resume from system management mode); stubbed,
 *  not implemented yet. */
FNIEMOP_STUB(iemOp_rsm);
3812
3813
/** Opcode 0x0f 0xab - bts Ev,Gv. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC("bts Ev,Gv");
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
3820
3821
/** Opcode 0x0f 0xac - shrd Ev,Gv,Ib. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,Ib");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
3828
3829
/** Opcode 0x0f 0xad - shrd Ev,Gv,CL. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,CL");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
3836
3837
3838/** Opcode 0x0f 0xae mem/0. */
3839FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
3840{
3841 IEMOP_MNEMONIC("fxsave m512");
3842 IEMOP_HLP_NO_LOCK_PREFIX();
3843 if (!IEM_IS_INTEL_CPUID_FEATURE_PRESENT_EDX(X86_CPUID_FEATURE_EDX_FXSR))
3844 return IEMOP_RAISE_INVALID_LOCK_PREFIX();
3845
3846 IEM_MC_BEGIN(3, 1);
3847 IEM_MC_ARG_CONST(uint8_t, iEffSeg,/*=*/pIemCpu->iEffSeg, 0);
3848 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
3849 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
3850 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
3851 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
3852 IEM_MC_END();
3853 return VINF_SUCCESS;
3854}
3855
3856
3857/** Opcode 0x0f 0xae mem/1. */
3858FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
3859{
3860 IEMOP_MNEMONIC("fxrstor m512");
3861 IEMOP_HLP_NO_LOCK_PREFIX();
3862 if (!IEM_IS_INTEL_CPUID_FEATURE_PRESENT_EDX(X86_CPUID_FEATURE_EDX_FXSR))
3863 return IEMOP_RAISE_INVALID_LOCK_PREFIX();
3864
3865 IEM_MC_BEGIN(3, 1);
3866 IEM_MC_ARG_CONST(uint8_t, iEffSeg,/*=*/pIemCpu->iEffSeg, 0);
3867 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
3868 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
3869 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
3870 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
3871 IEM_MC_END();
3872 return VINF_SUCCESS;
3873}
3874
3875
/* Remaining group 15 encodings.  FNIEMOP_STUB_1 = not implemented yet;
 * FNIEMOP_UD_STUB_1 entries presumably decode to invalid opcode (#UD) -
 * confirm against the stub macro definitions. */

/** Opcode 0x0f 0xae mem/2 - ldmxcsr. */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3 - stmxcsr. */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4 - xsave. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5 - xrstor. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6 - xsaveopt. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7 - clflush. */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);

/** Opcode 0x0f 0xae 11b/5 - lfence. */
FNIEMOP_STUB_1(iemOp_Grp15_lfence, uint8_t, bRm);

/** Opcode 0x0f 0xae 11b/6 - mfence. */
FNIEMOP_STUB_1(iemOp_Grp15_mfence, uint8_t, bRm);

/** Opcode 0x0f 0xae 11b/7 - sfence. */
FNIEMOP_STUB_1(iemOp_Grp15_sfence, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/0 - rdfsbase. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1 - rdgsbase. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2 - wrfsbase. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3 - wrgsbase. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
3914
3915
/** Opcode 0x0f 0xae - group 15 dispatcher.
 *
 * Memory forms dispatch purely on the ModR/M reg field (fxsave, fxrstor,
 * ldmxcsr, stmxcsr, xsave, xrstor, xsaveopt, clflush).  Register forms
 * additionally dispatch on the prefix bytes: no prefix selects the fence
 * instructions, F3 (REPZ) selects rd/wr fs/gs base; everything else is an
 * invalid opcode. */
FNIEMOP_DEF(iemOp_Grp15)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* memory forms - select by reg field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* register forms - select by prefixes, then by reg field. */
        switch (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0: /* no prefix: memory fences */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;

            case IEM_OP_PRF_REPZ: /* F3 prefix: fs/gs base access */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}
3974
3975
/** Opcode 0x0f 0xaf - imul Gv,Ev (two operand form). */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC("imul Gv,Ev");
    /* SF/ZF/AF/PF are architecturally undefined for imul. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
3983
3984
/** Opcode 0x0f 0xb0 - cmpxchg Eb,Gb.
 *
 * Compares AL with Eb; on match stores Gb into Eb, otherwise loads Eb into
 * AL.  The assembly helper receives the comparand by reference (pu8Al) and
 * may update it, so the memory form writes AL back afterwards. */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC("cmpxchg Eb,Gb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory destination: AL is fetched into a local, passed by
           reference, and unconditionally stored back afterwards. */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4042
/** Opcode 0x0f 0xb1. */
/**
 * CMPXCHG Ev,Gv - compare EAX-family register with the destination; on match
 * store the source register, otherwise load the accumulator from the
 * destination.  EFLAGS are produced by the assembly helper.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC("cmpxchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Register destination (mod=3): operate directly on guest register references.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                /* Dispatch to the locked helper when a LOCK prefix was decoded. */
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* 32-bit register writes clear the high halves of the 64-bit registers. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                /* NOTE(review): on 32-bit hosts the 64-bit source is passed by
                   reference rather than by value - presumably a calling-convention
                   limitation of the assembly helpers; confirm against IEMAllAImpl. */
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * Memory destination: map the operand read/write, run the helper on the
         * mapping, then commit memory, EFLAGS and the accumulator copy.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax); /* local AX copy; written back after the helper ran */

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax); /* local EAX copy; written back after the helper ran */

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                /* See the register-form note: 64-bit source by reference on 32-bit hosts. */
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax); /* local RAX copy; written back after the helper ran */

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4235
4236
/**
 * Common worker for the far-pointer load instructions (LSS/LFS/LGS here, see
 * the callers): reads an offset followed by a 16-bit selector from memory and
 * hands both to iemCImpl_load_SReg_Greg, which loads the segment register and
 * the general register.
 *
 * @param   iSegReg     The segment register to load (X86_SREG_XXX).
 */
FNIEMOP_DEF_1(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /* The source cannot be a register. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;

    /* The selector follows the offset in memory, so its displacement equals
       the operand size (2, 4 or 8 bytes). */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4297
4298
/** Opcode 0x0f 0xb2. */
/** LSS Gv,Mp - load far pointer into SS and a general register. */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC("lss Gv,Mp");
    /* Defer to the common far-pointer-load worker with SS as target. */
    return FNIEMOP_CALL_1(iemOpCommonLoadSRegAndGreg, X86_SREG_SS);
}
4305
4306
/** Opcode 0x0f 0xb3. */
/** BTR Ev,Gv - bit test and reset; shares the common bit-op worker. */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC("btr Ev,Gv");
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}
4313
4314
/** Opcode 0x0f 0xb4. */
/** LFS Gv,Mp - load far pointer into FS and a general register. */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC("lfs Gv,Mp");
    /* Defer to the common far-pointer-load worker with FS as target. */
    return FNIEMOP_CALL_1(iemOpCommonLoadSRegAndGreg, X86_SREG_FS);
}
4321
4322
/** Opcode 0x0f 0xb5. */
/** LGS Gv,Mp - load far pointer into GS and a general register. */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC("lgs Gv,Mp");
    /* Defer to the common far-pointer-load worker with GS as target. */
    return FNIEMOP_CALL_1(iemOpCommonLoadSRegAndGreg, X86_SREG_GS);
}
4329
4330
/** Opcode 0x0f 0xb6. */
/**
 * MOVZX Gv,Eb - zero-extend a byte register or byte memory operand into a
 * 16/32/64-bit general register.
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC("movzx Gv,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4420
4421
/** Opcode 0x0f 0xb7. */
/**
 * MOVZX Gv,Ew - zero-extend a word operand into a 32/64-bit register.  The
 * 16-bit and 32-bit effective operand sizes share the 32-bit path.
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC("movzx Gv,Ew");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
4487
4488
/** Opcode 0x0f 0xb8. */
/* Not implemented yet - presumably POPCNT Gv,Ev with the F3 prefix and the
   legacy JMPE encoding otherwise; confirm when implementing. */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
4491
4492
/** Opcode 0x0f 0xb9. */
/** Group 10 - reserved opcode; always raises an invalid-opcode exception. */
FNIEMOP_DEF(iemOp_Grp10)
{
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
4499
4500
4501/** Opcode 0x0f 0xba. */
4502FNIEMOP_DEF(iemOp_Grp8)
4503{
4504 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4505 PCIEMOPBINSIZES pImpl;
4506 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
4507 {
4508 case 0: case 1: case 2: case 3:
4509 return IEMOP_RAISE_INVALID_OPCODE();
4510 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC("bt Ev,Ib"); break;
4511 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC("bts Ev,Ib"); break;
4512 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC("btr Ev,Ib"); break;
4513 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC("btc Ev,Ib"); break;
4514 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4515 }
4516 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4517
4518 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4519 {
4520 /* register destination. */
4521 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
4522 IEMOP_HLP_NO_LOCK_PREFIX();
4523
4524 switch (pIemCpu->enmEffOpSize)
4525 {
4526 case IEMMODE_16BIT:
4527 IEM_MC_BEGIN(3, 0);
4528 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4529 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
4530 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4531
4532 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4533 IEM_MC_REF_EFLAGS(pEFlags);
4534 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4535
4536 IEM_MC_ADVANCE_RIP();
4537 IEM_MC_END();
4538 return VINF_SUCCESS;
4539
4540 case IEMMODE_32BIT:
4541 IEM_MC_BEGIN(3, 0);
4542 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4543 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
4544 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4545
4546 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4547 IEM_MC_REF_EFLAGS(pEFlags);
4548 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4549
4550 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4551 IEM_MC_ADVANCE_RIP();
4552 IEM_MC_END();
4553 return VINF_SUCCESS;
4554
4555 case IEMMODE_64BIT:
4556 IEM_MC_BEGIN(3, 0);
4557 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4558 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
4559 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4560
4561 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4562 IEM_MC_REF_EFLAGS(pEFlags);
4563 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4564
4565 IEM_MC_ADVANCE_RIP();
4566 IEM_MC_END();
4567 return VINF_SUCCESS;
4568
4569 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4570 }
4571 }
4572 else
4573 {
4574 /* memory destination. */
4575
4576 uint32_t fAccess;
4577 if (pImpl->pfnLockedU16)
4578 fAccess = IEM_ACCESS_DATA_RW;
4579 else /* BT */
4580 {
4581 IEMOP_HLP_NO_LOCK_PREFIX();
4582 fAccess = IEM_ACCESS_DATA_R;
4583 }
4584
4585 /** @todo test negative bit offsets! */
4586 switch (pIemCpu->enmEffOpSize)
4587 {
4588 case IEMMODE_16BIT:
4589 IEM_MC_BEGIN(3, 1);
4590 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4591 IEM_MC_ARG(uint16_t, u16Src, 1);
4592 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4593 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4594
4595 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
4596 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
4597 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
4598 IEM_MC_FETCH_EFLAGS(EFlags);
4599 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4600 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4601 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4602 else
4603 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4604 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4605
4606 IEM_MC_COMMIT_EFLAGS(EFlags);
4607 IEM_MC_ADVANCE_RIP();
4608 IEM_MC_END();
4609 return VINF_SUCCESS;
4610
4611 case IEMMODE_32BIT:
4612 IEM_MC_BEGIN(3, 1);
4613 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4614 IEM_MC_ARG(uint32_t, u32Src, 1);
4615 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4616 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4617
4618 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
4619 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
4620 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
4621 IEM_MC_FETCH_EFLAGS(EFlags);
4622 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4623 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4624 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4625 else
4626 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4627 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4628
4629 IEM_MC_COMMIT_EFLAGS(EFlags);
4630 IEM_MC_ADVANCE_RIP();
4631 IEM_MC_END();
4632 return VINF_SUCCESS;
4633
4634 case IEMMODE_64BIT:
4635 IEM_MC_BEGIN(3, 1);
4636 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4637 IEM_MC_ARG(uint64_t, u64Src, 1);
4638 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4639 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4640
4641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
4642 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
4643 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
4644 IEM_MC_FETCH_EFLAGS(EFlags);
4645 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4646 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4647 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4648 else
4649 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
4650 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4651
4652 IEM_MC_COMMIT_EFLAGS(EFlags);
4653 IEM_MC_ADVANCE_RIP();
4654 IEM_MC_END();
4655 return VINF_SUCCESS;
4656
4657 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4658 }
4659 }
4660
4661}
4662
4663
/** Opcode 0x0f 0xbb. */
/** BTC Ev,Gv - bit test and complement; shares the common bit-op worker. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC("btc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
4670
4671
/** Opcode 0x0f 0xbc. */
/** BSF Gv,Ev - bit scan forward via the generic rv,rm binary-op worker. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC("bsf Gv,Ev");
    /* All flags except ZF are undefined after BSF. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}
4679
4680
/** Opcode 0x0f 0xbd. */
/** BSR Gv,Ev - bit scan reverse via the generic rv,rm binary-op worker. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC("bsr Gv,Ev");
    /* All flags except ZF are undefined after BSR. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
4688
4689
/** Opcode 0x0f 0xbe. */
/**
 * MOVSX Gv,Eb - sign-extend a byte register or byte memory operand into a
 * 16/32/64-bit general register.
 */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC("movsx Gv,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4779
4780
/** Opcode 0x0f 0xbf. */
/**
 * MOVSX Gv,Ew - sign-extend a word operand into a 32/64-bit register.  The
 * 16-bit and 32-bit effective operand sizes share the 32-bit path.
 */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC("movsx Gv,Ew");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
4846
4847
/** Opcode 0x0f 0xc0. */
/**
 * XADD Eb,Gb - exchange the byte register with the destination and store the
 * sum in the destination; the old destination value ends up in the source
 * register.
 */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.  The helper works on a local copy of the
         * source register, which is written back only after the memory and
         * EFLAGS commits succeeded.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t, u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* Dispatch to the locked helper when a LOCK prefix was decoded. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}
4905
4906
4907/** Opcode 0x0f 0xc1. */
4908FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
4909{
4910 IEMOP_MNEMONIC("xadd Ev,Gv");
4911 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4912
4913 /*
4914 * If rm is denoting a register, no more instruction bytes.
4915 */
4916 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4917 {
4918 IEMOP_HLP_NO_LOCK_PREFIX();
4919
4920 switch (pIemCpu->enmEffOpSize)
4921 {
4922 case IEMMODE_16BIT:
4923 IEM_MC_BEGIN(3, 0);
4924 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4925 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
4926 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4927
4928 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4929 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4930 IEM_MC_REF_EFLAGS(pEFlags);
4931 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
4932
4933 IEM_MC_ADVANCE_RIP();
4934 IEM_MC_END();
4935 return VINF_SUCCESS;
4936
4937 case IEMMODE_32BIT:
4938 IEM_MC_BEGIN(3, 0);
4939 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4940 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
4941 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4942
4943 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4944 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4945 IEM_MC_REF_EFLAGS(pEFlags);
4946 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
4947
4948 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4949 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
4950 IEM_MC_ADVANCE_RIP();
4951 IEM_MC_END();
4952 return VINF_SUCCESS;
4953
4954 case IEMMODE_64BIT:
4955 IEM_MC_BEGIN(3, 0);
4956 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4957 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
4958 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4959
4960 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4961 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4962 IEM_MC_REF_EFLAGS(pEFlags);
4963 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
4964
4965 IEM_MC_ADVANCE_RIP();
4966 IEM_MC_END();
4967 return VINF_SUCCESS;
4968
4969 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4970 }
4971 }
4972 else
4973 {
4974 /*
4975 * We're accessing memory.
4976 */
4977 switch (pIemCpu->enmEffOpSize)
4978 {
4979 case IEMMODE_16BIT:
4980 IEM_MC_BEGIN(3, 3);
4981 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4982 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
4983 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
4984 IEM_MC_LOCAL(uint16_t, u16RegCopy);
4985 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4986
4987 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4988 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
4989 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4990 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
4991 IEM_MC_FETCH_EFLAGS(EFlags);
4992 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4993 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
4994 else
4995 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
4996
4997 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4998 IEM_MC_COMMIT_EFLAGS(EFlags);
4999 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16RegCopy);
5000 IEM_MC_ADVANCE_RIP();
5001 IEM_MC_END();
5002 return VINF_SUCCESS;
5003
5004 case IEMMODE_32BIT:
5005 IEM_MC_BEGIN(3, 3);
5006 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5007 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
5008 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
5009 IEM_MC_LOCAL(uint32_t, u32RegCopy);
5010 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5011
5012 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5013 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
5014 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5015 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
5016 IEM_MC_FETCH_EFLAGS(EFlags);
5017 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5018 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
5019 else
5020 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
5021
5022 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5023 IEM_MC_COMMIT_EFLAGS(EFlags);
5024 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32RegCopy);
5025 IEM_MC_ADVANCE_RIP();
5026 IEM_MC_END();
5027 return VINF_SUCCESS;
5028
5029 case IEMMODE_64BIT:
5030 IEM_MC_BEGIN(3, 3);
5031 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5032 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
5033 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
5034 IEM_MC_LOCAL(uint64_t, u64RegCopy);
5035 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5036
5037 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5038 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
5039 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5040 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
5041 IEM_MC_FETCH_EFLAGS(EFlags);
5042 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5043 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
5044 else
5045 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
5046
5047 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5048 IEM_MC_COMMIT_EFLAGS(EFlags);
5049 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64RegCopy);
5050 IEM_MC_ADVANCE_RIP();
5051 IEM_MC_END();
5052 return VINF_SUCCESS;
5053
5054 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5055 }
5056 }
5057}
5058
/** Opcode 0x0f 0xc2 - cmpps/cmppd/cmpss/cmpsd (SSE compares) - not implemented yet. */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);

/** Opcode 0x0f 0xc3 - movnti (non-temporal store) - not implemented yet. */
FNIEMOP_STUB(iemOp_movnti_My_Gy);

/** Opcode 0x0f 0xc4 - pinsrw - not implemented yet. */
FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);

/** Opcode 0x0f 0xc5 - pextrw - not implemented yet. */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);

/** Opcode 0x0f 0xc6 - shufps/shufpd - not implemented yet. */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
5073
5074
/**
 * Opcode 0x0f 0xc7 !11/1 - CMPXCHG8B m64.
 *
 * Compares EDX:EAX with the 64-bit memory operand; the assembly helper
 * (iemAImpl_cmpxchg8b[_locked]) performs the compare/exchange with ECX:EBX
 * and updates EFLAGS.  On mismatch (ZF clear afterwards) the value read from
 * memory is written back into EAX and EDX.
 *
 * @param   bRm     The ModRM byte (already fetched); mod != 3 here.
 */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC("cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    /* Decode must be complete before mapping guest memory read/write. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Gather the EDX:EAX comparand into a local the helper can update. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    /* Gather the ECX:EBX replacement value. */
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    /* The LOCK prefix selects the atomic helper variant. */
    if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* ZF clear => comparison failed; write the memory value back to EDX:EAX. */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
5119
5120
/* Remaining group 9 encodings - all decode to #UD via the UD stubs for now. */

/** Opcode REX.W 0x0f 0xc7 !11/1 - cmpxchg16b. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm);

/** Opcode 0x0f 0xc7 11/6 - rdrand. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6 - vmptrld. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6 - vmclear. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6 - vmxon. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7 - vmptrst. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
5138
5139
5140/** Opcode 0x0f 0xc7. */
5141FNIEMOP_DEF(iemOp_Grp9)
5142{
5143 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
5144 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5145 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5146 {
5147 case 0: case 2: case 3: case 4: case 5:
5148 return IEMOP_RAISE_INVALID_OPCODE();
5149 case 1:
5150 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
5151 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
5152 || (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
5153 return IEMOP_RAISE_INVALID_OPCODE();
5154 if (bRm & IEM_OP_PRF_SIZE_REX_W)
5155 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
5156 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
5157 case 6:
5158 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5159 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
5160 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
5161 {
5162 case 0:
5163 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
5164 case IEM_OP_PRF_SIZE_OP:
5165 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
5166 case IEM_OP_PRF_REPZ:
5167 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
5168 default:
5169 return IEMOP_RAISE_INVALID_OPCODE();
5170 }
5171 case 7:
5172 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
5173 {
5174 case 0:
5175 case IEM_OP_PRF_REPZ:
5176 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
5177 default:
5178 return IEMOP_RAISE_INVALID_OPCODE();
5179 }
5180 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5181 }
5182}
5183
5184
5185/**
5186 * Common 'bswap register' helper.
5187 */
5188FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
5189{
5190 IEMOP_HLP_NO_LOCK_PREFIX();
5191 switch (pIemCpu->enmEffOpSize)
5192 {
5193 case IEMMODE_16BIT:
5194 IEM_MC_BEGIN(1, 0);
5195 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5196 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
5197 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
5198 IEM_MC_ADVANCE_RIP();
5199 IEM_MC_END();
5200 return VINF_SUCCESS;
5201
5202 case IEMMODE_32BIT:
5203 IEM_MC_BEGIN(1, 0);
5204 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5205 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
5206 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5207 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
5208 IEM_MC_ADVANCE_RIP();
5209 IEM_MC_END();
5210 return VINF_SUCCESS;
5211
5212 case IEMMODE_64BIT:
5213 IEM_MC_BEGIN(1, 0);
5214 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5215 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
5216 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
5217 IEM_MC_ADVANCE_RIP();
5218 IEM_MC_END();
5219 return VINF_SUCCESS;
5220
5221 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5222 }
5223}
5224
5225
5226/** Opcode 0x0f 0xc8. */
5227FNIEMOP_DEF(iemOp_bswap_rAX_r8)
5228{
5229 IEMOP_MNEMONIC("bswap rAX/r8");
5230 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pIemCpu->uRexReg);
5231}
5232
5233
5234/** Opcode 0x0f 0xc9. */
5235FNIEMOP_DEF(iemOp_bswap_rCX_r9)
5236{
5237 IEMOP_MNEMONIC("bswap rCX/r9");
5238 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pIemCpu->uRexReg);
5239}
5240
5241
5242/** Opcode 0x0f 0xca. */
5243FNIEMOP_DEF(iemOp_bswap_rDX_r10)
5244{
5245 IEMOP_MNEMONIC("bswap rDX/r9");
5246 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pIemCpu->uRexReg);
5247}
5248
5249
5250/** Opcode 0x0f 0xcb. */
5251FNIEMOP_DEF(iemOp_bswap_rBX_r11)
5252{
5253 IEMOP_MNEMONIC("bswap rBX/r9");
5254 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pIemCpu->uRexReg);
5255}
5256
5257
5258/** Opcode 0x0f 0xcc. */
5259FNIEMOP_DEF(iemOp_bswap_rSP_r12)
5260{
5261 IEMOP_MNEMONIC("bswap rSP/r12");
5262 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pIemCpu->uRexReg);
5263}
5264
5265
5266/** Opcode 0x0f 0xcd. */
5267FNIEMOP_DEF(iemOp_bswap_rBP_r13)
5268{
5269 IEMOP_MNEMONIC("bswap rBP/r13");
5270 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pIemCpu->uRexReg);
5271}
5272
5273
5274/** Opcode 0x0f 0xce. */
5275FNIEMOP_DEF(iemOp_bswap_rSI_r14)
5276{
5277 IEMOP_MNEMONIC("bswap rSI/r14");
5278 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pIemCpu->uRexReg);
5279}
5280
5281
5282/** Opcode 0x0f 0xcf. */
5283FNIEMOP_DEF(iemOp_bswap_rDI_r15)
5284{
5285 IEMOP_MNEMONIC("bswap rDI/r15");
5286 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pIemCpu->uRexReg);
5287}
5288
5289
5290
/* MMX/SSE packed integer and move instructions, opcodes 0x0f 0xd0..0xfe.
   All unimplemented - decoding stubs only. */

/** Opcode 0x0f 0xd0. */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
/** Opcode 0x0f 0xd1. */
FNIEMOP_STUB(iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq);
/** Opcode 0x0f 0xd2. */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
/** Opcode 0x0f 0xd3. */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
/** Opcode 0x0f 0xd4. */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
/** Opcode 0x0f 0xd5. */
FNIEMOP_STUB(iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq);
/** Opcode 0x0f 0xd6. */
FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq);
/** Opcode 0x0f 0xd7. */
FNIEMOP_STUB(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq);
/** Opcode 0x0f 0xd8. */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
/** Opcode 0x0f 0xd9. */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
/** Opcode 0x0f 0xda. */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
/** Opcode 0x0f 0xdb. */
FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
/** Opcode 0x0f 0xdc. */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
/** Opcode 0x0f 0xdd. */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
/** Opcode 0x0f 0xde. */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq);
/** Opcode 0x0f 0xdf. */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
/** Opcode 0x0f 0xe0. */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
/** Opcode 0x0f 0xe1. */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
/** Opcode 0x0f 0xe2. */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
/** Opcode 0x0f 0xe3. */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
/** Opcode 0x0f 0xe4. */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
/** Opcode 0x0f 0xe5. */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
/** Opcode 0x0f 0xe6. */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
/** Opcode 0x0f 0xe7. */
FNIEMOP_STUB(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq);
/** Opcode 0x0f 0xe8. */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
/** Opcode 0x0f 0xe9. */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
/** Opcode 0x0f 0xea. */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
/** Opcode 0x0f 0xeb. */
FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
/** Opcode 0x0f 0xec. */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
/** Opcode 0x0f 0xed. */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
/** Opcode 0x0f 0xee. */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
/** Opcode 0x0f 0xef. */
FNIEMOP_STUB(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq);
/** Opcode 0x0f 0xf0. */
FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
/** Opcode 0x0f 0xf1. */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq);
/** Opcode 0x0f 0xf2. */
FNIEMOP_STUB(iemOp_psld_Pq_Qq__pslld_Vdq_Wdq);
/** Opcode 0x0f 0xf3. */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq);
/** Opcode 0x0f 0xf4. */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
/** Opcode 0x0f 0xf5. */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
/** Opcode 0x0f 0xf6. */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
/** Opcode 0x0f 0xf7. */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
/** Opcode 0x0f 0xf8. */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq);
/** Opcode 0x0f 0xf9. */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
/** Opcode 0x0f 0xfa. */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
/** Opcode 0x0f 0xfb. */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq);
/** Opcode 0x0f 0xfc. */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
/** Opcode 0x0f 0xfd. */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
/** Opcode 0x0f 0xfe. */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
5385
5386
/** The two-byte (0x0f escape) opcode map, indexed by the second opcode byte.
 *  Workers whose names combine several mnemonics multiplex the 0x66/0xf3/0xf2
 *  prefixed forms internally. */
const PFNIEMOP g_apfnTwoByteMap[256] =
{
    /* 0x00 */ iemOp_Grp6,
    /* 0x01 */ iemOp_Grp7,
    /* 0x02 */ iemOp_lar_Gv_Ew,
    /* 0x03 */ iemOp_lsl_Gv_Ew,
    /* 0x04 */ iemOp_Invalid,
    /* 0x05 */ iemOp_syscall,
    /* 0x06 */ iemOp_clts,
    /* 0x07 */ iemOp_sysret,
    /* 0x08 */ iemOp_invd,
    /* 0x09 */ iemOp_wbinvd,
    /* 0x0a */ iemOp_Invalid,
    /* 0x0b */ iemOp_ud2,
    /* 0x0c */ iemOp_Invalid,
    /* 0x0d */ iemOp_nop_Ev_GrpP,
    /* 0x0e */ iemOp_femms,
    /* 0x0f */ iemOp_3Dnow,
    /* 0x10 */ iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
    /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
    /* 0x14 */ iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
    /* 0x15 */ iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
    /* 0x16 */ iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
    /* 0x17 */ iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
    /* 0x18 */ iemOp_prefetch_Grp16,
    /* 0x19 */ iemOp_nop_Ev,
    /* 0x1a */ iemOp_nop_Ev,
    /* 0x1b */ iemOp_nop_Ev,
    /* 0x1c */ iemOp_nop_Ev,
    /* 0x1d */ iemOp_nop_Ev,
    /* 0x1e */ iemOp_nop_Ev,
    /* 0x1f */ iemOp_nop_Ev,
    /* 0x20 */ iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
    /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
    /* 0x2f */ iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
    /* 0x30 */ iemOp_wrmsr,
    /* 0x31 */ iemOp_rdtsc,
    /* 0x32 */ iemOp_rdmsr,
    /* 0x33 */ iemOp_rdpmc,
    /* 0x34 */ iemOp_sysenter,
    /* 0x35 */ iemOp_sysexit,
    /* 0x36 */ iemOp_Invalid,
    /* 0x37 */ iemOp_getsec,
    /* 0x38 */ iemOp_3byte_Esc_A4,
    /* 0x39 */ iemOp_Invalid,
    /* 0x3a */ iemOp_3byte_Esc_A5,
    /* 0x3b */ iemOp_Invalid,
    /* 0x3c */ iemOp_movnti_Gv_Ev/*??*/,
    /* 0x3d */ iemOp_Invalid,
    /* 0x3e */ iemOp_Invalid,
    /* 0x3f */ iemOp_Invalid,
    /* 0x40 */ iemOp_cmovo_Gv_Ev,
    /* 0x41 */ iemOp_cmovno_Gv_Ev,
    /* 0x42 */ iemOp_cmovc_Gv_Ev,
    /* 0x43 */ iemOp_cmovnc_Gv_Ev,
    /* 0x44 */ iemOp_cmove_Gv_Ev,
    /* 0x45 */ iemOp_cmovne_Gv_Ev,
    /* 0x46 */ iemOp_cmovbe_Gv_Ev,
    /* 0x47 */ iemOp_cmovnbe_Gv_Ev,
    /* 0x48 */ iemOp_cmovs_Gv_Ev,
    /* 0x49 */ iemOp_cmovns_Gv_Ev,
    /* 0x4a */ iemOp_cmovp_Gv_Ev,
    /* 0x4b */ iemOp_cmovnp_Gv_Ev,
    /* 0x4c */ iemOp_cmovl_Gv_Ev,
    /* 0x4d */ iemOp_cmovnl_Gv_Ev,
    /* 0x4e */ iemOp_cmovle_Gv_Ev,
    /* 0x4f */ iemOp_cmovnle_Gv_Ev,
    /* 0x50 */ iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
    /* 0x51 */ iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
    /* 0x53 */ iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
    /* 0x54 */ iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
    /* 0x55 */ iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
    /* 0x56 */ iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
    /* 0x57 */ iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
    /* 0x58 */ iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
    /* 0x5c */ iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
    /* 0x60 */ iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
    /* 0x63 */ iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
    /* 0x67 */ iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
    /* 0x6b */ iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
    /* 0x6c */ iemOp_punpcklqdq_Vdq_Wdq,
    /* 0x6d */ iemOp_punpckhqdq_Vdq_Wdq,
    /* 0x6e */ iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
    /* 0x6f */ iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
    /* 0x71 */ iemOp_Grp12,
    /* 0x72 */ iemOp_Grp13,
    /* 0x73 */ iemOp_Grp14,
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
    /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
    /* 0x77 */ iemOp_emms,
    /* 0x78 */ iemOp_vmread_AmdGrp17,
    /* 0x79 */ iemOp_vmwrite,
    /* 0x7a */ iemOp_Invalid,
    /* 0x7b */ iemOp_Invalid,
    /* 0x7c */ iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
    /* 0x7d */ iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
    /* 0x7f */ iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
    /* 0x80 */ iemOp_jo_Jv,
    /* 0x81 */ iemOp_jno_Jv,
    /* 0x82 */ iemOp_jc_Jv,
    /* 0x83 */ iemOp_jnc_Jv,
    /* 0x84 */ iemOp_je_Jv,
    /* 0x85 */ iemOp_jne_Jv,
    /* 0x86 */ iemOp_jbe_Jv,
    /* 0x87 */ iemOp_jnbe_Jv,
    /* 0x88 */ iemOp_js_Jv,
    /* 0x89 */ iemOp_jns_Jv,
    /* 0x8a */ iemOp_jp_Jv,
    /* 0x8b */ iemOp_jnp_Jv,
    /* 0x8c */ iemOp_jl_Jv,
    /* 0x8d */ iemOp_jnl_Jv,
    /* 0x8e */ iemOp_jle_Jv,
    /* 0x8f */ iemOp_jnle_Jv,
    /* 0x90 */ iemOp_seto_Eb,
    /* 0x91 */ iemOp_setno_Eb,
    /* 0x92 */ iemOp_setc_Eb,
    /* 0x93 */ iemOp_setnc_Eb,
    /* 0x94 */ iemOp_sete_Eb,
    /* 0x95 */ iemOp_setne_Eb,
    /* 0x96 */ iemOp_setbe_Eb,
    /* 0x97 */ iemOp_setnbe_Eb,
    /* 0x98 */ iemOp_sets_Eb,
    /* 0x99 */ iemOp_setns_Eb,
    /* 0x9a */ iemOp_setp_Eb,
    /* 0x9b */ iemOp_setnp_Eb,
    /* 0x9c */ iemOp_setl_Eb,
    /* 0x9d */ iemOp_setnl_Eb,
    /* 0x9e */ iemOp_setle_Eb,
    /* 0x9f */ iemOp_setnle_Eb,
    /* 0xa0 */ iemOp_push_fs,
    /* 0xa1 */ iemOp_pop_fs,
    /* 0xa2 */ iemOp_cpuid,
    /* 0xa3 */ iemOp_bt_Ev_Gv,
    /* 0xa4 */ iemOp_shld_Ev_Gv_Ib,
    /* 0xa5 */ iemOp_shld_Ev_Gv_CL,
    /* 0xa6 */ iemOp_Invalid,
    /* 0xa7 */ iemOp_Invalid,
    /* 0xa8 */ iemOp_push_gs,
    /* 0xa9 */ iemOp_pop_gs,
    /* 0xaa */ iemOp_rsm,
    /* 0xab */ iemOp_bts_Ev_Gv,
    /* 0xac */ iemOp_shrd_Ev_Gv_Ib,
    /* 0xad */ iemOp_shrd_Ev_Gv_CL,
    /* 0xae */ iemOp_Grp15,
    /* 0xaf */ iemOp_imul_Gv_Ev,
    /* 0xb0 */ iemOp_cmpxchg_Eb_Gb,
    /* 0xb1 */ iemOp_cmpxchg_Ev_Gv,
    /* 0xb2 */ iemOp_lss_Gv_Mp,
    /* 0xb3 */ iemOp_btr_Ev_Gv,
    /* 0xb4 */ iemOp_lfs_Gv_Mp,
    /* 0xb5 */ iemOp_lgs_Gv_Mp,
    /* 0xb6 */ iemOp_movzx_Gv_Eb,
    /* 0xb7 */ iemOp_movzx_Gv_Ew,
    /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,
    /* 0xb9 */ iemOp_Grp10,
    /* 0xba */ iemOp_Grp8,
    /* 0xbb */ iemOp_btc_Ev_Gv, /* slot comment was mislabeled 0xbd; entry itself is correct */
    /* 0xbc */ iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev,
    /* 0xbe */ iemOp_movsx_Gv_Eb,
    /* 0xbf */ iemOp_movsx_Gv_Ew,
    /* 0xc0 */ iemOp_xadd_Eb_Gb,
    /* 0xc1 */ iemOp_xadd_Ev_Gv,
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy,
    /* 0xc4 */ iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
    /* 0xc7 */ iemOp_Grp9,
    /* 0xc8 */ iemOp_bswap_rAX_r8,
    /* 0xc9 */ iemOp_bswap_rCX_r9,
    /* 0xca */ iemOp_bswap_rDX_r10,
    /* 0xcb */ iemOp_bswap_rBX_r11,
    /* 0xcc */ iemOp_bswap_rSP_r12,
    /* 0xcd */ iemOp_bswap_rBP_r13,
    /* 0xce */ iemOp_bswap_rSI_r14,
    /* 0xcf */ iemOp_bswap_rDI_r15,
    /* 0xd0 */ iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq,
    /* 0xd2 */ iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
    /* 0xd4 */ iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
    /* 0xd5 */ iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq,
    /* 0xd6 */ iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
    /* 0xda */ iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
    /* 0xdb */ iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
    /* 0xdc */ iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
    /* 0xdd */ iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
    /* 0xde */ iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq,
    /* 0xdf */ iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
    /* 0xe0 */ iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
    /* 0xe1 */ iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
    /* 0xe2 */ iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
    /* 0xe6 */ iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
    /* 0xea */ iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
    /* 0xeb */ iemOp_por_Pq_Qq__por_Vdq_Wdq,
    /* 0xec */ iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
    /* 0xed */ iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
    /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
    /* 0xf0 */ iemOp_lddqu_Vdq_Mdq,
    /* 0xf1 */ iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq,
    /* 0xf2 */ iemOp_psld_Pq_Qq__pslld_Vdq_Wdq,
    /* 0xf3 */ iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
    /* 0xf8 */ iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
    /* 0xf9 */ iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
    /* 0xfa */ iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
    /* 0xfb */ iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq,
    /* 0xfc */ iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
    /* 0xfd */ iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
    /* 0xfe */ iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
    /* 0xff */ iemOp_Invalid
};
5646
5647/** @} */
5648
5649
5650/** @name One byte opcodes.
5651 *
5652 * @{
5653 */
5654
/* ADD, opcodes 0x00..0x05: every form defers to the common binary-operator
   helpers with the 'add' implementation table. */

/** Opcode 0x00. */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC("add Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}


/** Opcode 0x01. */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC("add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}


/** Opcode 0x02. */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC("add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}


/** Opcode 0x03. */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC("add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}


/** Opcode 0x04. */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC("add al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}


/** Opcode 0x05. */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC("add rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
5701
5702
/** Opcode 0x06 - push es. */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC("push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}


/** Opcode 0x07 - pop es (invalid in 64-bit mode). */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC("pop es");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pIemCpu->enmEffOpSize);
}
5719
5720
/** Opcode 0x08 - or Eb,Gb.  AF is architecturally undefined after OR. */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC("or  Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
5728
5729
5730/** Opcode 0x09. */
5731FNIEMOP_DEF(iemOp_or_Ev_Gv)
5732{
5733 IEMOP_MNEMONIC("or Ev,Gv ");
5734 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5735 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
5736}
5737
5738
/* OR, remaining forms (0x0a..0x0d).  AF is architecturally undefined. */

/** Opcode 0x0a. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC("or  Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/** Opcode 0x0b. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC("or  Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/** Opcode 0x0c. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC("or  al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/** Opcode 0x0d. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC("or  rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
5773
5774
/** Opcode 0x0e - push cs. */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC("push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}


/** Opcode 0x0f - escape to the two-byte opcode map: fetch the second opcode
 *  byte and dispatch through g_apfnTwoByteMap. */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
}
5789
/* ADC, opcodes 0x10..0x15: add with carry, via the common binary-operator
   helpers and the 'adc' implementation table. */

/** Opcode 0x10. */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC("adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/** Opcode 0x11. */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC("adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/** Opcode 0x12. */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC("adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/** Opcode 0x13. */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC("adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/** Opcode 0x14. */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC("adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/** Opcode 0x15. */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC("adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
5836
5837
/** Opcode 0x16 - push ss. */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC("push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}


/** Opcode 0x17 - pop ss (invalid in 64-bit mode). */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC("pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pIemCpu->enmEffOpSize);
}
5854
5855
/* SBB, opcodes 0x18..0x1d: subtract with borrow, via the common
   binary-operator helpers and the 'sbb' implementation table. */

/** Opcode 0x18. */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC("sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/** Opcode 0x19. */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC("sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/** Opcode 0x1a. */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC("sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1b. */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC("sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1c. */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC("sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/** Opcode 0x1d. */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC("sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
5902
5903
/** Opcode 0x1e - push ds. */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC("push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}


/** Opcode 0x1f - pop ds (invalid in 64-bit mode). */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC("pop ds");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pIemCpu->enmEffOpSize);
}
5920
5921
5922/** Opcode 0x20. */
5923FNIEMOP_DEF(iemOp_and_Eb_Gb)
5924{
5925 IEMOP_MNEMONIC("and Eb,Gb");
5926 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5927 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
5928}
5929
5930
/** Opcode 0x21. */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND r/m16/32/64, r16/32/64 - AF declared undefined for verification. */
    IEMOP_MNEMONIC("and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}
5938
5939
/** Opcode 0x22. */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND r8, r/m8 - AF declared undefined for verification. */
    IEMOP_MNEMONIC("and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}
5947
5948
/** Opcode 0x23. */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND r16/32/64, r/m16/32/64 - AF declared undefined for verification. */
    IEMOP_MNEMONIC("and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}
5956
5957
/** Opcode 0x24. */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    /* AND AL, imm8 - AF declared undefined for verification. */
    IEMOP_MNEMONIC("and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}
5965
5966
/** Opcode 0x25. */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX, imm16/32 - AF declared undefined for verification. */
    IEMOP_MNEMONIC("and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
5974
5975
/** Opcode 0x26. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record it, then decode the next opcode byte. */
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_ES;
    pIemCpu->iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
5985
5986
/** Opcode 0x27. */
FNIEMOP_STUB(iemOp_daa); /* DAA - decimal adjust AL after addition; decoder not implemented yet (stub). */
5989
5990
/** Opcode 0x28. */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB r/m8, r8 - byte subtract, destination in reg/mem. */
    IEMOP_MNEMONIC("sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}
5997
5998
/** Opcode 0x29. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB r/m16/32/64, r16/32/64 - subtract, destination in reg/mem. */
    IEMOP_MNEMONIC("sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}
6005
6006
/** Opcode 0x2a. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB r8, r/m8 - byte subtract, destination in register. */
    IEMOP_MNEMONIC("sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}
6013
6014
/** Opcode 0x2b. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB r16/32/64, r/m16/32/64 - subtract, destination in register. */
    IEMOP_MNEMONIC("sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}
6021
6022
/** Opcode 0x2c. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL, imm8 - subtract immediate from the AL accumulator. */
    IEMOP_MNEMONIC("sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}
6029
6030
/** Opcode 0x2d. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX, imm16/32 - subtract immediate from the accumulator. */
    IEMOP_MNEMONIC("sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
6037
6038
/** Opcode 0x2e. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record it, then decode the next opcode byte. */
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_CS;
    pIemCpu->iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
6048
6049
/** Opcode 0x2f. */
FNIEMOP_STUB(iemOp_das); /* DAS - decimal adjust AL after subtraction; decoder not implemented yet (stub). */
6052
6053
/** Opcode 0x30. */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR r/m8, r8 - AF declared undefined for verification. */
    IEMOP_MNEMONIC("xor Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}
6061
6062
/** Opcode 0x31. */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR r/m16/32/64, r16/32/64 - AF declared undefined for verification. */
    IEMOP_MNEMONIC("xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}
6070
6071
/** Opcode 0x32. */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR r8, r/m8 - AF declared undefined for verification. */
    IEMOP_MNEMONIC("xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}
6079
6080
/** Opcode 0x33. */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR r16/32/64, r/m16/32/64 - AF declared undefined for verification. */
    IEMOP_MNEMONIC("xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}
6088
6089
/** Opcode 0x34. */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL, imm8 - AF declared undefined for verification. */
    IEMOP_MNEMONIC("xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}
6097
6098
/** Opcode 0x35. */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    /* XOR rAX, imm16/32 - AF declared undefined for verification. */
    IEMOP_MNEMONIC("xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}
6106
6107
/** Opcode 0x36. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record it, then decode the next opcode byte. */
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_SS;
    pIemCpu->iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
6117
6118
/** Opcode 0x37. */
FNIEMOP_STUB(iemOp_aaa); /* AAA - ASCII adjust AL after addition; decoder not implemented yet (stub). */
6121
6122
/** Opcode 0x38. */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP r/m8, r8 - flags-only compare; lock prefix rejected up front. */
    IEMOP_MNEMONIC("cmp Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}
6130
6131
/** Opcode 0x39. */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP r/m16/32/64, r16/32/64 - flags-only compare; lock prefix rejected up front. */
    IEMOP_MNEMONIC("cmp Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}
6139
6140
/** Opcode 0x3a. */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP r8, r/m8 - flags-only compare. */
    IEMOP_MNEMONIC("cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}
6147
6148
/** Opcode 0x3b. */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP r16/32/64, r/m16/32/64 - flags-only compare. */
    IEMOP_MNEMONIC("cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}
6155
6156
/** Opcode 0x3c. */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL, imm8 - flags-only compare of the accumulator. */
    IEMOP_MNEMONIC("cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}
6163
6164
/** Opcode 0x3d. */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX, imm16/32 - flags-only compare of the accumulator. */
    IEMOP_MNEMONIC("cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
6171
6172
/** Opcode 0x3e. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record it, then decode the next opcode byte. */
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_DS;
    pIemCpu->iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
6182
6183
/** Opcode 0x3f. */
FNIEMOP_STUB(iemOp_aas); /* AAS - ASCII adjust AL after subtraction; decoder not implemented yet (stub). */
6186
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * References the register operand by size, then invokes the matching unary
 * worker from @a pImpl which updates the register and EFLAGS in place.
 *
 * @param   pImpl   Unary operator implementation table (16/32/64-bit workers).
 * @param   iReg    Index of the general-purpose register operand.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* A 32-bit operation clears the upper half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* Not expected to be reached; all operand sizes are handled above. */
    return VINF_SUCCESS;
}
6231
6232
/** Opcode 0x40. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* Plain REX (no W/R/X/B bits): record the prefix and continue decoding. */
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}
6250
6251
/** Opcode 0x41. */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.B: extends the r/m, opcode-reg and SIB base register fields. */
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pIemCpu->uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}
6270
6271
/** Opcode 0x42. */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.X: extends the SIB index register field. */
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}
6290
6291
6292
/** Opcode 0x43. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.XB: extends both the SIB index and the r/m/base register fields. */
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}
6312
6313
/** Opcode 0x44. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.R: extends the ModRM reg field. */
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pIemCpu->uRexReg = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}
6332
6333
/** Opcode 0x45. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RB: extends the ModRM reg field and the r/m/base fields. */
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}
6353
6354
/** Opcode 0x46. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RX: extends the ModRM reg field and the SIB index field. */
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}
6374
6375
/** Opcode 0x47. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RXB: extends the reg, index and r/m/base register fields. */
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
6396
6397
/** Opcode 0x48. */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.W: promotes the operand size, so recalculate it. */
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}
6416
6417
/** Opcode 0x49. */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.WB: operand-size promotion plus r/m/base register extension. */
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}
6437
6438
/** Opcode 0x4a. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.WX: operand-size promotion plus SIB index register extension. */
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}
6458
6459
/** Opcode 0x4b. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.WXB: operand-size promotion plus index and r/m/base extension. */
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}
6480
6481
/** Opcode 0x4c. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.WR: operand-size promotion plus ModRM reg field extension. */
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}
6501
6502
/** Opcode 0x4d. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.WRB: operand-size promotion plus reg and r/m/base extension. */
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}
6523
6524
/** Opcode 0x4e. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.WRX: operand-size promotion plus reg and SIB index extension. */
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}
6545
6546
/** Opcode 0x4f. */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.WRXB: all REX extension bits set plus operand-size promotion. */
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
6568
6569
/**
 * Common 'push register' helper.
 *
 * In 64-bit mode the register index is extended with REX.B and the default
 * operand size becomes 64-bit (a 0x66 prefix still selects 16-bit; there is
 * no 32-bit push in long mode).
 *
 * @param   iReg    Index of the general-purpose register to push.
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pIemCpu->uRexB;
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
6615
6616
/** Opcode 0x50. */
FNIEMOP_DEF(iemOp_push_eAX)
{
    /* PUSH rAX (r8 with REX.B). */
    IEMOP_MNEMONIC("push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
6623
6624
/** Opcode 0x51. */
FNIEMOP_DEF(iemOp_push_eCX)
{
    /* PUSH rCX (r9 with REX.B). */
    IEMOP_MNEMONIC("push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
6631
6632
/** Opcode 0x52. */
FNIEMOP_DEF(iemOp_push_eDX)
{
    /* PUSH rDX (r10 with REX.B). */
    IEMOP_MNEMONIC("push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
6639
6640
/** Opcode 0x53. */
FNIEMOP_DEF(iemOp_push_eBX)
{
    /* PUSH rBX (r11 with REX.B). */
    IEMOP_MNEMONIC("push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
6647
6648
/** Opcode 0x54. */
FNIEMOP_DEF(iemOp_push_eSP)
{
    /* PUSH rSP (r12 with REX.B). */
    IEMOP_MNEMONIC("push rSP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
6655
6656
/** Opcode 0x55. */
FNIEMOP_DEF(iemOp_push_eBP)
{
    /* PUSH rBP (r13 with REX.B). */
    IEMOP_MNEMONIC("push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
6663
6664
/** Opcode 0x56. */
FNIEMOP_DEF(iemOp_push_eSI)
{
    /* PUSH rSI (r14 with REX.B). */
    IEMOP_MNEMONIC("push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
6671
6672
/** Opcode 0x57. */
FNIEMOP_DEF(iemOp_push_eDI)
{
    /* PUSH rDI (r15 with REX.B). */
    IEMOP_MNEMONIC("push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
6679
6680
/**
 * Common 'pop register' helper.
 *
 * In 64-bit mode the register index is extended with REX.B and the default
 * operand size becomes 64-bit (a 0x66 prefix still selects 16-bit).
 * Note that the asterisk inside IEM_MC_LOCAL's name argument makes the
 * local a pointer; the register is accessed by reference and written by
 * the POP microcode.
 *
 * @param   iReg    Index of the general-purpose register to pop into.
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pIemCpu->uRexB;
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

/** @todo How does this code handle iReg==X86_GREG_xSP. How does a real CPU
 *        handle it, for that matter (Intel pseudo code hints that the popped
 *        value is incremented by the stack item size.)  Test it, both encodings
 *        and all three register sizes. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, *pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, *pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, *pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
6731
6732
/** Opcode 0x58. */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    /* POP rAX (r8 with REX.B). */
    IEMOP_MNEMONIC("pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
6739
6740
/** Opcode 0x59. */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    /* POP rCX (r9 with REX.B). */
    IEMOP_MNEMONIC("pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
6747
6748
/** Opcode 0x5a. */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    /* POP rDX (r10 with REX.B). */
    IEMOP_MNEMONIC("pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
6755
6756
/** Opcode 0x5b. */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    /* POP rBX (r11 with REX.B). */
    IEMOP_MNEMONIC("pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
6763
6764
/** Opcode 0x5c. */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    /* POP rSP (r12 with REX.B) - see the xSP @todo in iemOpCommonPopGReg. */
    IEMOP_MNEMONIC("pop rSP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
}
6771
6772
/** Opcode 0x5d. */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    /* POP rBP (r13 with REX.B). */
    IEMOP_MNEMONIC("pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
6779
6780
/** Opcode 0x5e. */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    /* POP rSI (r14 with REX.B). */
    IEMOP_MNEMONIC("pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
6787
6788
/** Opcode 0x5f. */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    /* POP rDI (r15 with REX.B). */
    IEMOP_MNEMONIC("pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
6795
6796
/** Opcode 0x60. */
FNIEMOP_DEF(iemOp_pusha)
{
    /* PUSHA/PUSHAD - invalid in 64-bit mode; deferred to the C implementation
       matching the effective operand size (only 16 and 32 bit are possible). */
    IEMOP_MNEMONIC("pusha");
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
6807
6808
/** Opcode 0x61. */
FNIEMOP_DEF(iemOp_popa)
{
    /* POPA/POPAD - invalid in 64-bit mode; deferred to the C implementation
       matching the effective operand size (only 16 and 32 bit are possible). */
    IEMOP_MNEMONIC("popa");
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
}
6819
6820
/** Opcode 0x62. */
FNIEMOP_STUB(iemOp_bound_Gv_Ma); /* BOUND - array bounds check; decoder not implemented yet (stub). */
6823
/** Opcode 0x63 - non-64-bit modes. */
FNIEMOP_STUB(iemOp_arpl_Ew_Gw); /* ARPL - adjust RPL field; decoder not implemented yet (stub). */
6826
6827
/** Opcode 0x63.
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases.  */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pIemCpu->enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC("movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register.
         */
        /* Fetch the 32-bit source register sign-extended to 64 bits and store it. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        /* Effective address is calculated before decoding is declared done. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6869
6870
/** Opcode 0x64. */
FNIEMOP_DEF(iemOp_seg_FS)
{
    /* FS segment-override prefix: record it, then decode the next opcode byte. */
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_FS;
    pIemCpu->iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
6880
6881
/** Opcode 0x65. */
FNIEMOP_DEF(iemOp_seg_GS)
{
    /* GS segment-override prefix: record it, then decode the next opcode byte. */
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_GS;
    pIemCpu->iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
6891
6892
/** Opcode 0x66. */
FNIEMOP_DEF(iemOp_op_size)
{
    /* Operand-size override prefix: record it, recalculate the effective
       operand size and continue decoding. */
    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pIemCpu);

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
6902
6903
/** Opcode 0x67. */
FNIEMOP_DEF(iemOp_addr_size)
{
    /* Address-size override prefix: toggle relative to the default address
       mode (16<->32 in legacy modes, 64->32 in long mode) and continue. */
    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pIemCpu->enmDefAddrMode)
    {
        case IEMMODE_16BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pIemCpu->enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
6919
6920
/** Opcode 0x68. */
FNIEMOP_DEF(iemOp_push_Iz)
{
    /* PUSH imm16/32 - in 64-bit mode the immediate is a sign-extended imm32
       and the default operand size is 64-bit. */
    IEMOP_MNEMONIC("push Iz");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* imm32 sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6964
6965
/** Opcode 0x69. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    /*
     * Three-operand signed multiply: Gv = Ev * Iz.  The two-operand worker
     * (iemAImpl_imul_two_*) multiplies into a temporary which is then stored
     * to the destination register.  SF/ZF/AF/PF are declared undefined for
     * the verifier.  For memory operands the immediate follows the
     * displacement, so the effective address is calculated first with the
     * immediate size passed as cbImm (2/4/4 bytes).
     */
    IEMOP_MNEMONIC("imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG(uint16_t,        u16Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                /* cbImm = 2: an imm16 follows the ModRM/displacement bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG(uint32_t,        u32Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                /* cbImm = 4: an imm32 follows the ModRM/displacement bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                /* The immediate is an imm32 sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG(uint64_t,        u64Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                /* cbImm = 4: the sign-extended imm32 follows the ModRM/displacement bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_INTERNAL_ERROR_3);
}
7124
7125
/** Opcode 0x6a - PUSH Ib: push a sign-extended byte immediate with the
 *  current effective operand size (16/32/64-bit). */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC("push Ib");
    /* Fetch the immediate as signed so the IEM_MC_PUSH_* below sign-extend it. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* push defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0,0);
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
        /* no default: IEMMODE has exactly these three values */
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7151
7152
7153/** Opcode 0x6b. */
7154FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
7155{
7156 IEMOP_MNEMONIC("imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
7157 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7158 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7159
7160 switch (pIemCpu->enmEffOpSize)
7161 {
7162 case IEMMODE_16BIT:
7163 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7164 {
7165 /* register operand */
7166 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7168
7169 IEM_MC_BEGIN(3, 1);
7170 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7171 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
7172 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7173 IEM_MC_LOCAL(uint16_t, u16Tmp);
7174
7175 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
7176 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
7177 IEM_MC_REF_EFLAGS(pEFlags);
7178 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
7179 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);
7180
7181 IEM_MC_ADVANCE_RIP();
7182 IEM_MC_END();
7183 }
7184 else
7185 {
7186 /* memory operand */
7187 IEM_MC_BEGIN(3, 2);
7188 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7189 IEM_MC_ARG(uint16_t, u16Src, 1);
7190 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7191 IEM_MC_LOCAL(uint16_t, u16Tmp);
7192 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7193
7194 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7195 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
7196 IEM_MC_ASSIGN(u16Src, u16Imm);
7197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7198 IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
7199 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
7200 IEM_MC_REF_EFLAGS(pEFlags);
7201 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
7202 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);
7203
7204 IEM_MC_ADVANCE_RIP();
7205 IEM_MC_END();
7206 }
7207 return VINF_SUCCESS;
7208
7209 case IEMMODE_32BIT:
7210 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7211 {
7212 /* register operand */
7213 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7215
7216 IEM_MC_BEGIN(3, 1);
7217 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7218 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
7219 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7220 IEM_MC_LOCAL(uint32_t, u32Tmp);
7221
7222 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
7223 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
7224 IEM_MC_REF_EFLAGS(pEFlags);
7225 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
7226 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
7227
7228 IEM_MC_ADVANCE_RIP();
7229 IEM_MC_END();
7230 }
7231 else
7232 {
7233 /* memory operand */
7234 IEM_MC_BEGIN(3, 2);
7235 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7236 IEM_MC_ARG(uint32_t, u32Src, 1);
7237 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7238 IEM_MC_LOCAL(uint32_t, u32Tmp);
7239 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7240
7241 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
7242 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
7243 IEM_MC_ASSIGN(u32Src, u32Imm);
7244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7245 IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
7246 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
7247 IEM_MC_REF_EFLAGS(pEFlags);
7248 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
7249 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
7250
7251 IEM_MC_ADVANCE_RIP();
7252 IEM_MC_END();
7253 }
7254 return VINF_SUCCESS;
7255
7256 case IEMMODE_64BIT:
7257 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7258 {
7259 /* register operand */
7260 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7262
7263 IEM_MC_BEGIN(3, 1);
7264 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7265 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
7266 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7267 IEM_MC_LOCAL(uint64_t, u64Tmp);
7268
7269 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
7270 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
7271 IEM_MC_REF_EFLAGS(pEFlags);
7272 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
7273 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
7274
7275 IEM_MC_ADVANCE_RIP();
7276 IEM_MC_END();
7277 }
7278 else
7279 {
7280 /* memory operand */
7281 IEM_MC_BEGIN(3, 2);
7282 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7283 IEM_MC_ARG(uint64_t, u64Src, 1);
7284 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7285 IEM_MC_LOCAL(uint64_t, u64Tmp);
7286 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7287
7288 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7289 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
7290 IEM_MC_ASSIGN(u64Src, u64Imm);
7291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7292 IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
7293 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
7294 IEM_MC_REF_EFLAGS(pEFlags);
7295 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
7296 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
7297
7298 IEM_MC_ADVANCE_RIP();
7299 IEM_MC_END();
7300 }
7301 return VINF_SUCCESS;
7302 }
7303 AssertFailedReturn(VERR_INTERNAL_ERROR_3);
7304}
7305
7306
/** Opcode 0x6c - INSB: input byte(s) from port DX to ES:[e/rDI].
 *  Dispatches to a C implementation selected by REP prefix and effective
 *  address size. */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* F2 (REPNZ) is treated the same as F3 (REPZ) here. */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op8_addr16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op8_addr32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op8_addr64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op8_addr16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op8_addr32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op8_addr64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7334
7335
/** Opcode 0x6d - INSW/INSD: input word/dword(s) from port DX to ES:[e/rDI].
 *  Dispatches on REP prefix, effective operand size and address size.
 *  The 64-bit operand size case shares the 32-bit implementations (there is
 *  no 64-bit port I/O operand). */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* F2 (REPNZ) is treated the same as F3 (REPZ) here. */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op16_addr16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op16_addr32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op16_addr64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size falls back to 32-bit I/O */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op32_addr16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op32_addr32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op32_addr64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op16_addr16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op16_addr32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op16_addr64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size falls back to 32-bit I/O */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op32_addr16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op32_addr32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op32_addr64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7395
7396
/** Opcode 0x6e - OUTSB: output byte(s) from DS:[e/rSI] (segment overridable,
 *  hence iEffSeg is passed along) to port DX. */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* F2 (REPNZ) is treated the same as F3 (REPZ) here. */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep out DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("out DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7424
7425
/** Opcode 0x6f - OUTSW/OUTSD: output word/dword(s) from DS:[e/rSI] (segment
 *  overridable) to port DX.  Dispatches on REP prefix, operand and address
 *  size; 64-bit operand size shares the 32-bit implementations. */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* F2 (REPNZ) is treated the same as F3 (REPZ) here. */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size falls back to 32-bit I/O */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size falls back to 32-bit I/O */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7485
7486
/** Opcode 0x70 - JO Jb: jump short if overflow (OF=1). */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC("jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* branch taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* fall through */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7504
7505
/** Opcode 0x71 - JNO Jb: jump short if no overflow (OF=0).
 *  Note the inverted arms: the IF (OF set) arm falls through. */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC("jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();           /* fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* branch taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7523
/** Opcode 0x72 - JC/JB/JNAE Jb: jump short if carry (CF=1). */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC("jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* branch taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* fall through */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7541
7542
/** Opcode 0x73 - JNC/JNB/JAE Jb: jump short if no carry (CF=0).
 *  Inverted arms: the IF (CF set) arm falls through. */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC("jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();           /* fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* branch taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7560
7561
/** Opcode 0x74 - JE/JZ Jb: jump short if equal/zero (ZF=1). */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC("je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* branch taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* fall through */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7579
7580
/** Opcode 0x75 - JNE/JNZ Jb: jump short if not equal/not zero (ZF=0).
 *  Inverted arms: the IF (ZF set) arm falls through. */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC("jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();           /* fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* branch taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7598
7599
/** Opcode 0x76 - JBE/JNA Jb: jump short if below or equal (CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC("jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* branch taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* fall through */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7617
7618
/** Opcode 0x77 - JNBE/JA Jb: jump short if above (CF=0 and ZF=0).
 *  Inverted arms: the IF (CF or ZF set) arm falls through. */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC("jnbe/ja Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();           /* fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* branch taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7636
7637
/** Opcode 0x78 - JS Jb: jump short if sign (SF=1). */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC("js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* branch taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* fall through */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7655
7656
/** Opcode 0x79 - JNS Jb: jump short if no sign (SF=0).
 *  Inverted arms: the IF (SF set) arm falls through. */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC("jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();           /* fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* branch taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7674
7675
/** Opcode 0x7a - JP/JPE Jb: jump short if parity even (PF=1). */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC("jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* branch taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* fall through */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7693
7694
/** Opcode 0x7b - JNP/JPO Jb: jump short if parity odd (PF=0).
 *  Inverted arms: the IF (PF set) arm falls through. */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC("jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();           /* fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* branch taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7712
7713
/** Opcode 0x7c - JL/JNGE Jb: jump short if less (SF != OF). */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC("jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* branch taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* fall through */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7731
7732
/** Opcode 0x7d - JNL/JGE Jb: jump short if greater or equal (SF == OF).
 *  Inverted arms: the IF (SF != OF) arm falls through. */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC("jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();           /* fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* branch taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7750
7751
/** Opcode 0x7e - JLE/JNG Jb: jump short if less or equal (ZF=1 or SF != OF). */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC("jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* branch taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* fall through */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7769
7770
/** Opcode 0x7f - JNLE/JG Jb: jump short if greater (ZF=0 and SF == OF).
 *  Inverted arms: the IF (ZF set or SF != OF) arm falls through. */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC("jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();           /* fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* branch taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7788
7789
/** Opcode 0x80 - Group 1 Eb,Ib: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP on a byte
 *  destination with a byte immediate; the operation is selected by the
 *  ModR/M /reg field via the g_apIemImplGrp1 table. */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The mnemonic table packs each name into 4 bytes (NUL padded), indexed by /reg * 4. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Eb,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();     /* LOCK is invalid with a register destination */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,                2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        /* CMP has no locked variant (pfnLockedU8 == NULL) and only reads the destination. */
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);       /* 1 byte immediate follows */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1);

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7848
7849
/** Opcode 0x81 - Group 1 Ev,Iz: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP on a
 *  word/dword/qword destination with a full-size immediate (16-bit imm for
 *  16-bit operands, 32-bit imm otherwise, sign-extended to 64 bits in long
 *  mode).  The operation is selected by the ModR/M /reg field. */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The mnemonic table packs each name into 4 bytes (NUL padded), indexed by /reg * 4. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Iz");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                /* CMP has no locked variant and only reads the destination. */
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP, TEST */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t,        u16Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);   /* 2 byte immediate follows */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the upper half in long mode */

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                /* CMP has no locked variant and only reads the destination. */
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP, TEST */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t,        u32Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);   /* 4 byte immediate follows */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended to 64 bits */
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                /* CMP has no locked variant and only reads the destination. */
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t,        u64Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);   /* 4 byte immediate follows */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
8024
8025
/** Opcode 0x82 - alias of Group 1 Eb,Ib (0x80); invalid in 64-bit mode,
 *  hence the IEMOP_HLP_NO_64BIT check before delegating. */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
8032
8033
/** Opcode 0x83 - Group 1 Ev,Ib: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP on a
 *  word/dword/qword destination with a sign-extended byte immediate.
 *  The operation is selected by the ModR/M /reg field. */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The mnemonic table packs each name into 4 bytes (NUL padded), indexed by /reg * 4. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_NO_LOCK_PREFIX();     /* LOCK is invalid with a register destination */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the upper half in long mode */

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        uint32_t fAccess;
        /* CMP has no locked variant and only reads the destination. */
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG(uint16_t,        u16Src,                     1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);   /* 1 byte immediate follows */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);           /* sign-extend imm8 */
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG(uint32_t,        u32Src,                     1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);   /* 1 byte immediate follows */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);           /* sign-extend imm8 */
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG(uint64_t,        u64Src,                     1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);   /* 1 byte immediate follows */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);           /* sign-extend imm8 */
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
8194
8195
/** Opcode 0x84. */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    /* TEST Eb,Gb - reuses the generic byte r/m,reg binary-operator decoder
       with the 'test' arithmetic implementation table. */
    IEMOP_MNEMONIC("test Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    /* AF is left undefined by TEST; tell the verification mode to ignore it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
8204
8205
/** Opcode 0x85. */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    /* TEST Ev,Gv - reuses the generic word/dword/qword r/m,reg binary-operator
       decoder with the 'test' arithmetic implementation table. */
    IEMOP_MNEMONIC("test Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    /* AF is left undefined by TEST; tell the verification mode to ignore it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
8214
8215
/** Opcode 0x86. */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    /* XCHG Eb,Gb - exchange the byte register/memory operand with the byte
       register selected by ModRM.reg. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        /* Register form: fetch both bytes, then store them crosswise. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        /* Map the memory byte read/write, take a reference to the register,
           and let the assembly xchg worker swap them in place. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *,  pu8Mem,           0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,           1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8263
8264
/** Opcode 0x87. */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    /* XCHG Ev,Gv - exchange the word/dword/qword register/memory operand with
       the general register selected by ModRM.reg.  One case per effective
       operand size, each for the register and the memory forms. */
    IEMOP_MNEMONIC("xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                /* Fetch both registers, store them crosswise. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                /* The U32 store macros handle the implicit zeroing of the
                   high dword in 64-bit mode. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* Map the memory operand read/write, reference the register, and let
           the assembly xchg worker swap them in place. */
        switch (pIemCpu->enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *,  pu16Mem, 0);
                IEM_MC_ARG(uint16_t *,  pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *,  pu32Mem, 0);
                IEM_MC_ARG(uint32_t *,  pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* The worker wrote through the reference, so the high dword
                   must be cleared explicitly here. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *,  pu64Mem, 0);
                IEM_MC_ARG(uint64_t *,  pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8386
8387
/** Opcode 0x88. */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    /* MOV Eb,Gb - store the byte register selected by ModRM.reg into the
       register or memory operand selected by ModRM.rm. */
    IEMOP_MNEMONIC("mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register to register. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
8426
8427
/** Opcode 0x89. */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    /* MOV Ev,Gv - store the general register selected by ModRM.reg into the
       register or memory operand selected by ModRM.rm; one case per
       effective operand size. */
    IEMOP_MNEMONIC("mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
8514
8515
/** Opcode 0x8a. */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    /* MOV Gb,Eb - load the byte register selected by ModRM.reg from the
       register or memory operand selected by ModRM.rm. */
    IEMOP_MNEMONIC("mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register to register. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8552
8553
/** Opcode 0x8b. */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    /* MOV Gv,Ev - load the general register selected by ModRM.reg from the
       register or memory operand selected by ModRM.rm; one case per
       effective operand size. */
    IEMOP_MNEMONIC("mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
8640
8641
8642/** Opcode 0x63. */
8643FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
8644{
8645 if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
8646 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
8647 if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
8648 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
8649 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
8650}
8651
8652
/** Opcode 0x8c. */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    /* MOV Ev,Sw - store the segment selector named by ModRM.reg into a
       general register or a 16-bit memory location. */
    IEMOP_MNEMONIC("mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                /* Zero-extending fetch variants for the wider destinations. */
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t,  u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8725
8726
8727
8728
/** Opcode 0x8d. */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    /* LEA Gv,M - store the effective address of the memory operand (no memory
       access is performed) into the register selected by ModRM.reg, truncated
       to the effective operand size. */
    IEMOP_MNEMONIC("lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    /* NOTE(review): the register form is undefined for LEA; this raises it
       via the invalid-lock-prefix helper rather than the invalid-opcode one -
       confirm both yield \#UD. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_LOCK_PREFIX(); /* no register form */

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            /* No truncation needed; store the address as-is. */
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_INTERNAL_ERROR_5);
}
8773
8774
/** Opcode 0x8e. */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    /* MOV Sw,Ev - load the segment register named by ModRM.reg from a 16-bit
       register or memory operand.  The actual load (descriptor checks etc.)
       is deferred to the iemCImpl_load_SReg C implementation. */
    IEMOP_MNEMONIC("mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    /* CS cannot be the destination of a MOV; selectors above GS don't exist. */
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8828
8829
8830/** Opcode 0x8f /0. */
8831FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
8832{
8833 /* This bugger is rather annoying as it requires rSP to be updated before
8834 doing the effective address calculations. Will eventually require a
8835 split between the R/M+SIB decoding and the effective address
8836 calculation - which is something that is required for any attempt at
8837 reusing this code for a recompiler. It may also be good to have if we
8838 need to delay #UD exception caused by invalid lock prefixes.
8839
8840 For now, we'll do a mostly safe interpreter-only implementation here. */
8841 /** @todo What's the deal with the 'reg' field and pop Ev? Ignorning it for
8842 * now until tests show it's checked.. */
8843 IEMOP_MNEMONIC("pop Ev");
8844 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
8845
8846 /* Register access is relatively easy and can share code. */
8847 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8848 return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
8849
8850 /*
8851 * Memory target.
8852 *
8853 * Intel says that RSP is incremented before it's used in any effective
8854 * address calcuations. This means some serious extra annoyance here since
8855 * we decode and calculate the effective address in one step and like to
8856 * delay committing registers till everything is done.
8857 *
8858 * So, we'll decode and calculate the effective address twice. This will
8859 * require some recoding if turned into a recompiler.
8860 */
8861 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */
8862
8863#ifndef TST_IEM_CHECK_MC
8864 /* Calc effective address with modified ESP. */
8865 uint8_t const offOpcodeSaved = pIemCpu->offOpcode;
8866 RTGCPTR GCPtrEff;
8867 VBOXSTRICTRC rcStrict;
8868 rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
8869 if (rcStrict != VINF_SUCCESS)
8870 return rcStrict;
8871 pIemCpu->offOpcode = offOpcodeSaved;
8872
8873 PCPUMCTX pCtx = pIemCpu->CTX_SUFF(pCtx);
8874 uint64_t const RspSaved = pCtx->rsp;
8875 switch (pIemCpu->enmEffOpSize)
8876 {
8877 case IEMMODE_16BIT: iemRegAddToRsp(pCtx, 2); break;
8878 case IEMMODE_32BIT: iemRegAddToRsp(pCtx, 4); break;
8879 case IEMMODE_64BIT: iemRegAddToRsp(pCtx, 8); break;
8880 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8881 }
8882 rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
8883 Assert(rcStrict == VINF_SUCCESS);
8884 pCtx->rsp = RspSaved;
8885
8886 /* Perform the operation - this should be CImpl. */
8887 RTUINT64U TmpRsp;
8888 TmpRsp.u = pCtx->rsp;
8889 switch (pIemCpu->enmEffOpSize)
8890 {
8891 case IEMMODE_16BIT:
8892 {
8893 uint16_t u16Value;
8894 rcStrict = iemMemStackPopU16Ex(pIemCpu, &u16Value, &TmpRsp);
8895 if (rcStrict == VINF_SUCCESS)
8896 rcStrict = iemMemStoreDataU16(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u16Value);
8897 break;
8898 }
8899
8900 case IEMMODE_32BIT:
8901 {
8902 uint32_t u32Value;
8903 rcStrict = iemMemStackPopU32Ex(pIemCpu, &u32Value, &TmpRsp);
8904 if (rcStrict == VINF_SUCCESS)
8905 rcStrict = iemMemStoreDataU32(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u32Value);
8906 break;
8907 }
8908
8909 case IEMMODE_64BIT:
8910 {
8911 uint64_t u64Value;
8912 rcStrict = iemMemStackPopU64Ex(pIemCpu, &u64Value, &TmpRsp);
8913 if (rcStrict == VINF_SUCCESS)
8914 rcStrict = iemMemStoreDataU16(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u64Value);
8915 break;
8916 }
8917
8918 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8919 }
8920 if (rcStrict == VINF_SUCCESS)
8921 {
8922 pCtx->rsp = TmpRsp.u;
8923 iemRegUpdateRip(pIemCpu);
8924 }
8925 return rcStrict;
8926
8927#else
8928 return VERR_IEM_IPE_2;
8929#endif
8930}
8931
8932
8933/** Opcode 0x8f. */
8934FNIEMOP_DEF(iemOp_Grp1A)
8935{
8936 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8937 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only pop Ev in this group. */
8938 return IEMOP_RAISE_INVALID_OPCODE();
8939 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
8940}
8941
8942
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Exchanges the general register encoded in the opcode (REX.B applied here)
 * with rAX at the current effective operand size.  Used by opcodes
 * 0x90..0x97.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /* Fold in REX.B so r8..r15 can be addressed. */
    iReg |= pIemCpu->uRexB;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8992
8993
/** Opcode 0x90. */
FNIEMOP_DEF(iemOp_nop)
{
    /* 0x90 is NOP only for the plain encoding (xchg rAX,rAX); with REX.B it
       really is an exchange with R8/R8D. */
    /* R8/R8D and RAX/EAX can be exchanged. */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_B)
    {
        IEMOP_MNEMONIC("xchg r8,rAX");
        return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
    }

    /* F3 0x90 is PAUSE; both PAUSE and NOP are emulated as a no-op here. */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
        IEMOP_MNEMONIC("pause");
    else
        IEMOP_MNEMONIC("nop");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9013
9014
/** Opcode 0x91. */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    /* Exchange rCX (or r9 with REX.B) with rAX. */
    IEMOP_MNEMONIC("xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
9021
9022
/** Opcode 0x92. */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    /* Exchange rDX (or r10 with REX.B) with rAX. */
    IEMOP_MNEMONIC("xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
9029
9030
/** Opcode 0x93. */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    /* Exchange rBX (or r11 with REX.B) with rAX. */
    IEMOP_MNEMONIC("xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
9037
9038
9039/** Opcode 0x94. */
9040FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
9041{
9042 IEMOP_MNEMONIC("xchg rSX,rAX");
9043 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
9044}
9045
9046
/** Opcode 0x95. */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    /* Exchange rBP (or r13 with REX.B) with rAX. */
    IEMOP_MNEMONIC("xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
9053
9054
/** Opcode 0x96. */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    /* Exchange rSI (or r14 with REX.B) with rAX. */
    IEMOP_MNEMONIC("xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
9061
9062
/** Opcode 0x97. */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    /* Exchange rDI (or r15 with REX.B) with rAX. */
    IEMOP_MNEMONIC("xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
9069
9070
/** Opcode 0x98. */
FNIEMOP_DEF(iemOp_cbw)
{
    /* CBW/CWDE/CDQE - sign extend the lower half of rAX into the full
       operand-size register: implemented by testing the sign bit and then
       OR-ing in or AND-ing off the upper half. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* CBW: AL -> AX. */
            IEMOP_MNEMONIC("cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            /* CWDE: AX -> EAX. */
            IEMOP_MNEMONIC("cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            /* CDQE: EAX -> RAX. */
            IEMOP_MNEMONIC("cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9116
9117
/** Opcode 0x99. */
FNIEMOP_DEF(iemOp_cwd)
{
    /* CWD/CDQ/CQO - fill rDX with the sign of rAX: all-ones if the sign bit
       of the operand-sized rAX is set, zero otherwise. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* CWD: AX -> DX:AX. */
            IEMOP_MNEMONIC("cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            /* CDQ: EAX -> EDX:EAX. */
            IEMOP_MNEMONIC("cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            /* CQO: RAX -> RDX:RAX. */
            IEMOP_MNEMONIC("cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9163
9164
/** Opcode 0x9a. (CALL far, absolute pointer in the instruction stream)
 *
 * Decodes the sel:offset immediate and defers to the C implementation of the
 * far call.  Invalid in 64-bit mode (IEMOP_HLP_NO_64BIT).
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC("call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    /* Offset is 16 or 32 bits depending on operand size; always widened to 32 bits here. */
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pIemCpu->enmEffOpSize);
}
9181
9182
/** Opcode 0x9b. (aka fwait)
 *
 * Checks for pending FPU exceptions / device-not-available conditions and
 * otherwise does nothing but advance RIP.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC("wait");
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9196
9197
9198/** Opcode 0x9c. */
9199FNIEMOP_DEF(iemOp_pushf_Fv)
9200{
9201 IEMOP_HLP_NO_LOCK_PREFIX();
9202 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9203 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pIemCpu->enmEffOpSize);
9204}
9205
9206
9207/** Opcode 0x9d. */
9208FNIEMOP_DEF(iemOp_popf_Fv)
9209{
9210 IEMOP_HLP_NO_LOCK_PREFIX();
9211 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9212 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pIemCpu->enmEffOpSize);
9213}
9214
9215
/** Opcode 0x9e. (SAHF)
 *
 * Stores AH into the low byte of EFLAGS (SF, ZF, AF, PF, CF; bit 1 forced
 * to 1).  In 64-bit mode the instruction is only valid when the AMD
 * LAHF/SAHF CPUID feature bit is present.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC("sahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_IS_AMD_CPUID_FEATURE_PRESENT_ECX(X86_CPUID_EXT_FEATURE_ECX_LAHF_SAHF))
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    /* AH is general register index xSP with the REX-less high-byte encoding. */
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Keep only the flag bits SAHF may set, force reserved bit 1, and merge
       into the unchanged upper 24 bits of EFLAGS. */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9238
9239
/** Opcode 0x9f. (LAHF)
 *
 * Loads the low byte of EFLAGS into AH.  In 64-bit mode the instruction is
 * only valid when the AMD LAHF/SAHF CPUID feature bit is present.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC("lahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_IS_AMD_CPUID_FEATURE_PRESENT_ECX(X86_CPUID_EXT_FEATURE_ECX_LAHF_SAHF))
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    /* AH is general register index xSP with the REX-less high-byte encoding. */
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9256
9257
/**
 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
 * prefixes. Will return on failures.
 *
 * The immediate offset width follows the effective ADDRESS size (16/32/64
 * bits) and is always zero-extended into a 64-bit RTGCPTR.  Rejects the
 * lock prefix after decoding, per the usual decoder pattern.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pIemCpu->enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_NO_LOCK_PREFIX(); \
    } while (0)
9282
9283/** Opcode 0xa0. */
9284FNIEMOP_DEF(iemOp_mov_Al_Ob)
9285{
9286 /*
9287 * Get the offset and fend of lock prefixes.
9288 */
9289 RTGCPTR GCPtrMemOff;
9290 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
9291
9292 /*
9293 * Fetch AL.
9294 */
9295 IEM_MC_BEGIN(0,1);
9296 IEM_MC_LOCAL(uint8_t, u8Tmp);
9297 IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
9298 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9299 IEM_MC_ADVANCE_RIP();
9300 IEM_MC_END();
9301 return VINF_SUCCESS;
9302}
9303
9304
/** Opcode 0xa1. (MOV rAX, moffs16/32/64)
 *
 * Loads AX/EAX/RAX from memory at the immediate offset (in the effective
 * segment); operand width selected by the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC("mov rAX,Ov");
    RTGCPTR  GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9350
9351
9352/** Opcode 0xa2. */
9353FNIEMOP_DEF(iemOp_mov_Ob_AL)
9354{
9355 /*
9356 * Get the offset and fend of lock prefixes.
9357 */
9358 RTGCPTR GCPtrMemOff;
9359 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
9360
9361 /*
9362 * Store AL.
9363 */
9364 IEM_MC_BEGIN(0,1);
9365 IEM_MC_LOCAL(uint8_t, u8Tmp);
9366 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
9367 IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrMemOff, u8Tmp);
9368 IEM_MC_ADVANCE_RIP();
9369 IEM_MC_END();
9370 return VINF_SUCCESS;
9371}
9372
9373
9374/** Opcode 0xa3. */
9375FNIEMOP_DEF(iemOp_mov_Ov_rAX)
9376{
9377 /*
9378 * Get the offset and fend of lock prefixes.
9379 */
9380 RTGCPTR GCPtrMemOff;
9381 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
9382
9383 /*
9384 * Store rAX.
9385 */
9386 switch (pIemCpu->enmEffOpSize)
9387 {
9388 case IEMMODE_16BIT:
9389 IEM_MC_BEGIN(0,1);
9390 IEM_MC_LOCAL(uint16_t, u16Tmp);
9391 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
9392 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrMemOff, u16Tmp);
9393 IEM_MC_ADVANCE_RIP();
9394 IEM_MC_END();
9395 return VINF_SUCCESS;
9396
9397 case IEMMODE_32BIT:
9398 IEM_MC_BEGIN(0,1);
9399 IEM_MC_LOCAL(uint32_t, u32Tmp);
9400 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
9401 IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrMemOff, u32Tmp);
9402 IEM_MC_ADVANCE_RIP();
9403 IEM_MC_END();
9404 return VINF_SUCCESS;
9405
9406 case IEMMODE_64BIT:
9407 IEM_MC_BEGIN(0,1);
9408 IEM_MC_LOCAL(uint64_t, u64Tmp);
9409 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
9410 IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrMemOff, u64Tmp);
9411 IEM_MC_ADVANCE_RIP();
9412 IEM_MC_END();
9413 return VINF_SUCCESS;
9414
9415 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9416 }
9417}
9418
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one non-repeated MOVS step: load ValBits bits from [effSeg:xSI],
 * store to [ES:xDI], then advance (or retreat, when EFLAGS.DF is set) both
 * index registers by the operand size in bytes.  AddrBits selects how the
 * index registers are read/updated (16/32/64-bit addressing).
 */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
9437
/** Opcode 0xa4. (MOVSB)
 *
 * Byte string move.  With a REP/REPNE prefix (both treated as REP here) the
 * whole loop is deferred to the C implementation; otherwise a single step is
 * emitted via IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movsb Xb,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
9471
9472
/** Opcode 0xa5. (MOVSW/MOVSD/MOVSQ)
 *
 * Word/dword/qword string move.  With a REP/REPNE prefix the whole loop is
 * deferred to the C implementation selected by operand and address size;
 * otherwise a single step is emitted via IEM_MOVS_CASE.
 *
 * NOTE(review): inner switches where every case returns fall through without
 * a 'break'; that path is unreachable by construction.
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movs Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    /* 64-bit operand size with 16-bit addressing cannot be encoded. */
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
9555
9556#undef IEM_MOVS_CASE
9557
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one non-repeated CMPS step: load the first operand from
 * [effSeg:xSI] and the second from [ES:xDI], run the cmp assembly helper on
 * them (updating EFLAGS only; the first operand is a local, so nothing is
 * written back), then advance or retreat both index registers per EFLAGS.DF.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *,     puValue1,   0); \
        IEM_MC_ARG(uint##ValBits##_t,       uValue2,    1); \
        IEM_MC_ARG(uint32_t *,              pEFlags,    2); \
        IEM_MC_LOCAL(uint##ValBits##_t,     uValue1); \
        IEM_MC_LOCAL(RTGCPTR,               uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
9585/** Opcode 0xa6. */
9586FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
9587{
9588 IEMOP_HLP_NO_LOCK_PREFIX();
9589
9590 /*
9591 * Use the C implementation if a repeat prefix is encountered.
9592 */
9593 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
9594 {
9595 IEMOP_MNEMONIC("repe cmps Xb,Yb");
9596 switch (pIemCpu->enmEffAddrMode)
9597 {
9598 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pIemCpu->iEffSeg);
9599 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pIemCpu->iEffSeg);
9600 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pIemCpu->iEffSeg);
9601 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9602 }
9603 }
9604 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
9605 {
9606 IEMOP_MNEMONIC("repe cmps Xb,Yb");
9607 switch (pIemCpu->enmEffAddrMode)
9608 {
9609 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pIemCpu->iEffSeg);
9610 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pIemCpu->iEffSeg);
9611 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pIemCpu->iEffSeg);
9612 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9613 }
9614 }
9615 IEMOP_MNEMONIC("cmps Xb,Yb");
9616
9617 /*
9618 * Sharing case implementation with cmps[wdq] below.
9619 */
9620 switch (pIemCpu->enmEffAddrMode)
9621 {
9622 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
9623 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
9624 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
9625 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9626 }
9627 return VINF_SUCCESS;
9628
9629}
9630
9631
/** Opcode 0xa7. (CMPSW/CMPSD/CMPSQ)
 *
 * Word/dword/qword string compare.  REPE and REPNE prefixes each defer to
 * their own C implementation selected by operand and address size; otherwise
 * a single step is emitted via IEM_CMPS_CASE.
 *
 * NOTE(review): inner switches where every case returns fall through without
 * a 'break'; that path is unreachable by construction.
 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    /* 64-bit operand size with 16-bit addressing cannot be encoded. */
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    /* 64-bit operand size with 16-bit addressing cannot be encoded. */
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC("cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
9750
9751#undef IEM_CMPS_CASE
9752
/** Opcode 0xa8. (TEST AL, imm8)
 *
 * ANDs AL with the immediate and updates flags only; AF is left undefined
 * per the architecture, hence the verification exemption.
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC("test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
9760
9761
/** Opcode 0xa9. (TEST rAX, imm16/32)
 *
 * ANDs AX/EAX/RAX with the immediate and updates flags only; AF is left
 * undefined per the architecture, hence the verification exemption.
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC("test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
9769
9770
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits one non-repeated STOS step: store the low ValBits bits of the
 * accumulator to [ES:xDI], then advance (or retreat, when EFLAGS.DF is set)
 * xDI by the operand size in bytes.
 */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr,  X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
9786
/** Opcode 0xaa. (STOSB)
 *
 * Byte string store.  With a REP/REPNE prefix (both treated as REP here)
 * the whole loop is deferred to the C implementation; otherwise a single
 * step is emitted via IEM_STOS_CASE.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yb,al");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
9820
9821
/** Opcode 0xab. (STOSW/STOSD/STOSQ)
 *
 * Word/dword/qword string store.  With a REP/REPNE prefix the whole loop is
 * deferred to the C implementation selected by operand and address size;
 * otherwise a single step is emitted via IEM_STOS_CASE.
 *
 * NOTE(review): inner switches where every case returns fall through without
 * a 'break'; that path is unreachable by construction.
 */
FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yv,rAX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    /* 64-bit operand size with 16-bit addressing cannot be encoded. */
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yv,rAX");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with stosb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
9904
9905#undef IEM_STOS_CASE
9906
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits one non-repeated LODS step: load ValBits bits from [effSeg:xSI]
 * into the accumulator, then advance (or retreat, when EFLAGS.DF is set)
 * xSI by the operand size in bytes.
 */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
9922
/** Opcode 0xac. (LODSB)
 *
 * Byte string load.  With a REP/REPNE prefix (both treated as REP here) the
 * whole loop is deferred to the C implementation; otherwise a single step
 * is emitted via IEM_LODS_CASE.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lodsb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lodsb al,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
9956
9957
/** Opcode 0xad. (LODSW/LODSD/LODSQ)
 *
 * Word/dword/qword string load.  With a REP/REPNE prefix the whole loop is
 * deferred to the C implementation selected by operand and address size;
 * otherwise a single step is emitted via IEM_LODS_CASE.
 *
 * NOTE(review): inner switches where every case returns fall through without
 * a 'break'; that path is unreachable by construction.
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lods rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    /* 64-bit operand size with 16-bit addressing cannot be encoded. */
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
10040
10041#undef IEM_LODS_CASE
10042
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits one non-repeated SCAS step: compare the accumulator against the
 * ValBits-bit value at [ES:xDI] via the cmp assembly helper (EFLAGS only;
 * the accumulator is passed by reference but not modified by cmp), then
 * advance or retreat xDI per EFLAGS.DF.
 */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *,     puRax,   0); \
        IEM_MC_ARG(uint##ValBits##_t,       uValue,  1); \
        IEM_MC_ARG(uint32_t *,              pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,               uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
10064
/** Opcode 0xae. (SCASB)
 *
 * Byte string scan.  REPE and REPNE prefixes each defer to their own C
 * implementation; otherwise a single step is emitted via IEM_SCAS_CASE.
 */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scasb al,Xb");

    /*
     * Sharing case implementation with scas[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
10109
10110
10111/** Opcode 0xaf. */
10112FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
10113{
10114 IEMOP_HLP_NO_LOCK_PREFIX();
10115
10116 /*
10117 * Use the C implementation if a repeat prefix is encountered.
10118 */
10119 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
10120 {
10121 IEMOP_MNEMONIC("repe scas rAX,Xv");
10122 switch (pIemCpu->enmEffOpSize)
10123 {
10124 case IEMMODE_16BIT:
10125 switch (pIemCpu->enmEffAddrMode)
10126 {
10127 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
10128 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
10129 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
10130 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10131 }
10132 break;
10133 case IEMMODE_32BIT:
10134 switch (pIemCpu->enmEffAddrMode)
10135 {
10136 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
10137 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
10138 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
10139 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10140 }
10141 case IEMMODE_64BIT:
10142 switch (pIemCpu->enmEffAddrMode)
10143 {
10144 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
10145 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
10146 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
10147 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10148 }
10149 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10150 }
10151 }
10152 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
10153 {
10154 IEMOP_MNEMONIC("repne scas rAX,Xv");
10155 switch (pIemCpu->enmEffOpSize)
10156 {
10157 case IEMMODE_16BIT:
10158 switch (pIemCpu->enmEffAddrMode)
10159 {
10160 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
10161 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
10162 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
10163 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10164 }
10165 break;
10166 case IEMMODE_32BIT:
10167 switch (pIemCpu->enmEffAddrMode)
10168 {
10169 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
10170 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
10171 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
10172 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10173 }
10174 case IEMMODE_64BIT:
10175 switch (pIemCpu->enmEffAddrMode)
10176 {
10177 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
10178 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
10179 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
10180 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10181 }
10182 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10183 }
10184 }
10185 IEMOP_MNEMONIC("scas rAX,Xv");
10186
10187 /*
10188 * Annoying double switch here.
10189 * Using ugly macro for implementing the cases, sharing it with scasb.
10190 */
10191 switch (pIemCpu->enmEffOpSize)
10192 {
10193 case IEMMODE_16BIT:
10194 switch (pIemCpu->enmEffAddrMode)
10195 {
10196 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
10197 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
10198 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
10199 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10200 }
10201 break;
10202
10203 case IEMMODE_32BIT:
10204 switch (pIemCpu->enmEffAddrMode)
10205 {
10206 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
10207 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
10208 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
10209 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10210 }
10211 break;
10212
10213 case IEMMODE_64BIT:
10214 switch (pIemCpu->enmEffAddrMode)
10215 {
10216 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
10217 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
10218 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
10219 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10220 }
10221 break;
10222 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10223 }
10224 return VINF_SUCCESS;
10225}
10226
10227#undef IEM_SCAS_CASE
10228
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the immediate byte and stores it into the given 8-bit register.
 *
 * @param   iReg    General register index of the destination, already
 *                  including any REX.B adjustment done by the caller.
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iReg, u8Value);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
10245
10246
/** Opcode 0xb0 - mov AL,Ib (R8L when REX.B is set). */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC("mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pIemCpu->uRexB);
}
10253
10254
/** Opcode 0xb1 - mov CL,Ib (R9L when REX.B is set). */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC("mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pIemCpu->uRexB);
}
10261
10262
/** Opcode 0xb2 - mov DL,Ib (R10L when REX.B is set). */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC("mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pIemCpu->uRexB);
}
10269
10270
/** Opcode 0xb3 - mov BL,Ib (R11L when REX.B is set). */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC("mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pIemCpu->uRexB);
}
10277
10278
/** Opcode 0xb4 - mov AH,Ib.  Register index 4 (X86_GREG_xSP) encodes AH
 *  without a REX prefix and SPL/R12L with one; the U8 register accessors
 *  resolve which. */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC("mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pIemCpu->uRexB);
}
10285
10286
/** Opcode 0xb5 - mov CH,Ib.  Index 5 (X86_GREG_xBP) is CH without REX,
 *  BPL/R13L with REX. */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC("mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pIemCpu->uRexB);
}
10293
10294
/** Opcode 0xb6 - mov DH,Ib.  Index 6 (X86_GREG_xSI) is DH without REX,
 *  SIL/R14L with REX. */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC("mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pIemCpu->uRexB);
}
10301
10302
/** Opcode 0xb7 - mov BH,Ib.  Index 7 (X86_GREG_xDI) is BH without REX,
 *  DIL/R15L with REX. */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC("mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pIemCpu->uRexB);
}
10309
10310
10311/**
10312 * Common 'mov regX,immX' helper.
10313 */
10314FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
10315{
10316 switch (pIemCpu->enmEffOpSize)
10317 {
10318 case IEMMODE_16BIT:
10319 {
10320 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10321 IEMOP_HLP_NO_LOCK_PREFIX();
10322
10323 IEM_MC_BEGIN(0, 1);
10324 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
10325 IEM_MC_STORE_GREG_U16(iReg, u16Value);
10326 IEM_MC_ADVANCE_RIP();
10327 IEM_MC_END();
10328 break;
10329 }
10330
10331 case IEMMODE_32BIT:
10332 {
10333 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10334 IEMOP_HLP_NO_LOCK_PREFIX();
10335
10336 IEM_MC_BEGIN(0, 1);
10337 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
10338 IEM_MC_STORE_GREG_U32(iReg, u32Value);
10339 IEM_MC_ADVANCE_RIP();
10340 IEM_MC_END();
10341 break;
10342 }
10343 case IEMMODE_64BIT:
10344 {
10345 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
10346 IEMOP_HLP_NO_LOCK_PREFIX();
10347
10348 IEM_MC_BEGIN(0, 1);
10349 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
10350 IEM_MC_STORE_GREG_U64(iReg, u64Value);
10351 IEM_MC_ADVANCE_RIP();
10352 IEM_MC_END();
10353 break;
10354 }
10355 }
10356
10357 return VINF_SUCCESS;
10358}
10359
10360
/** Opcode 0xb8 - mov rAX/r8,Iv (16/32/64-bit immediate per operand size). */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC("mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pIemCpu->uRexB);
}
10367
10368
/** Opcode 0xb9 - mov rCX/r9,Iv. */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC("mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pIemCpu->uRexB);
}
10375
10376
/** Opcode 0xba - mov rDX/r10,Iv. */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC("mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pIemCpu->uRexB);
}
10383
10384
/** Opcode 0xbb - mov rBX/r11,Iv. */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC("mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pIemCpu->uRexB);
}
10391
10392
/** Opcode 0xbc - mov rSP/r12,Iv. */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC("mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pIemCpu->uRexB);
}
10399
10400
/** Opcode 0xbd - mov rBP/r13,Iv. */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC("mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pIemCpu->uRexB);
}
10407
10408
/** Opcode 0xbe - mov rSI/r14,Iv. */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC("mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pIemCpu->uRexB);
}
10415
10416
/** Opcode 0xbf - mov rDI/r15,Iv. */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC("mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pIemCpu->uRexB);
}
10423
10424
10425/** Opcode 0xc0. */
10426FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
10427{
10428 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10429 PCIEMOPSHIFTSIZES pImpl;
10430 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10431 {
10432 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,Ib"); break;
10433 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,Ib"); break;
10434 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,Ib"); break;
10435 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,Ib"); break;
10436 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,Ib"); break;
10437 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,Ib"); break;
10438 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,Ib"); break;
10439 case 6: return IEMOP_RAISE_INVALID_LOCK_PREFIX();
10440 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
10441 }
10442 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
10443
10444 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10445 {
10446 /* register */
10447 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
10448 IEMOP_HLP_NO_LOCK_PREFIX();
10449 IEM_MC_BEGIN(3, 0);
10450 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10451 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
10452 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10453 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
10454 IEM_MC_REF_EFLAGS(pEFlags);
10455 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
10456 IEM_MC_ADVANCE_RIP();
10457 IEM_MC_END();
10458 }
10459 else
10460 {
10461 /* memory */
10462 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10463 IEM_MC_BEGIN(3, 2);
10464 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10465 IEM_MC_ARG(uint8_t, cShiftArg, 1);
10466 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
10467 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10468
10469 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10470 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
10471 IEM_MC_ASSIGN(cShiftArg, cShift);
10472 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
10473 IEM_MC_FETCH_EFLAGS(EFlags);
10474 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
10475
10476 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
10477 IEM_MC_COMMIT_EFLAGS(EFlags);
10478 IEM_MC_ADVANCE_RIP();
10479 IEM_MC_END();
10480 }
10481 return VINF_SUCCESS;
10482}
10483
10484
/**
 * Opcode 0xc1 - Group 2: rotate/shift Ev by an immediate byte count.
 *
 * The ModR/M reg field selects the operation; /6 is an unassigned encoding.
 * OF and AF are architecturally undefined after these operations.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,Ib"); break;
        /* NOTE(review): /6 is unassigned; the 0xd2 variant raises
           IEMOP_RAISE_INVALID_OPCODE here.  Both end in #UD, but the
           lock-prefix macro looks like a copy-paste slip - confirm. */
        case 6: return IEMOP_RAISE_INVALID_LOCK_PREFIX();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes clear the high half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* One immediate byte follows the ModR/M bytes, hence the '1'. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10621
10622
/** Opcode 0xc2 - retn Iw: near return, popping Iw extra bytes off the stack.
 *  Operand size defaults to 64-bit in long mode. */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC("retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, u16Imm);
}
10632
10633
/** Opcode 0xc3 - retn: near return (same C worker as 0xc2 with 0 extra
 *  bytes to pop). */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC("retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, 0);
}
10642
10643
/** Opcode 0xc4 - les Gv,Mp: load far pointer into ES:Gv via the common
 *  sreg+greg loader. */
FNIEMOP_DEF(iemOp_les_Gv_Mp)
{
    IEMOP_MNEMONIC("les Gv,Mp");
    return FNIEMOP_CALL_1(iemOpCommonLoadSRegAndGreg, X86_SREG_ES);
}
10650
10651
/** Opcode 0xc5 - lds Gv,Mp: load far pointer into DS:Gv via the common
 *  sreg+greg loader. */
FNIEMOP_DEF(iemOp_lds_Gv_Mp)
{
    IEMOP_MNEMONIC("lds Gv,Mp");
    return FNIEMOP_CALL_1(iemOpCommonLoadSRegAndGreg, X86_SREG_DS);
}
10658
10659
/**
 * Opcode 0xc6 - Group 11: mov Eb,Ib (the only assigned encoding, /0).
 * Other reg-field values raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* One immediate byte still follows, hence the '1'. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10691
10692
/**
 * Opcode 0xc7 - Group 11: mov Ev,Iz (the only assigned encoding, /0).
 * Other reg-field values raise \#UD.  In 64-bit mode the immediate is a
 * sign-extended 32-bit value.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* imm32 sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access.  The third IEM_MC_CALC_RM_EFF_ADDR argument is the
           number of immediate bytes that still follow (2 or 4). */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                /* imm32 sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10773
10774
10775
10776
/** Opcode 0xc8 - enter Iw,Ib: create a stack frame of Iw bytes with Ib
 *  nesting levels; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC("enter Iw,Ib");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pIemCpu->enmEffOpSize, cbFrame, u8NestingLevel);
}
10787
10788
10789/** Opcode 0xc9. */
10790FNIEMOP_DEF(iemOp_leave)
10791{
10792 IEMOP_MNEMONIC("retn");
10793 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10794 IEMOP_HLP_NO_LOCK_PREFIX();
10795 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pIemCpu->enmEffOpSize);
10796}
10797
10798
/** Opcode 0xca - retf Iw: far return, popping Iw extra bytes off the
 *  stack; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC("retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, u16Imm);
}
10808
10809
/** Opcode 0xcb - retf: far return (same C worker as 0xca with 0 extra
 *  bytes to pop). */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC("retf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, 0);
}
10818
10819
10820/** Opcode 0xcc. */
10821FNIEMOP_DEF(iemOp_int_3)
10822{
10823 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
10824}
10825
10826
/** Opcode 0xcd - int Ib: software interrupt with the immediate vector.
 *  NOTE(review): no IEMOP_MNEMONIC here, unlike the sibling handlers. */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
10833
10834
/**
 * Opcode 0xce - into: raise the overflow exception (vector 4) via the
 * common int worker.
 *
 * NOTE(review): the EFLAGS.OF test and the \#UD that INTO must raise in
 * 64-bit mode are presumably handled inside iemCImpl_int for X86_XCPT_OF -
 * confirm, as no guard is visible here.  Also no IEMOP_MNEMONIC, unlike
 * the sibling handlers.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false,       1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
10845
10846
/** Opcode 0xcf - iret: interrupt return, deferred to the C implementation
 *  (operand size selects IRET/IRETD/IRETQ). */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC("iret");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pIemCpu->enmEffOpSize);
}
10854
10855
/**
 * Opcode 0xd0 - Group 2: rotate/shift Eb by 1.
 *
 * The ModR/M reg field selects the operation; /6 is an unassigned encoding.
 * OF and AF are architecturally undefined after these operations.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,1"); break;
        /* NOTE(review): /6 is unassigned; the 0xd2 variant raises
           IEMOP_RAISE_INVALID_OPCODE here.  Both end in #UD, but the
           lock-prefix macro looks like a copy-paste slip - confirm. */
        case 6: return IEMOP_RAISE_INVALID_LOCK_PREFIX();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10911
10912
10913
/**
 * Opcode 0xd1 - Group 2: rotate/shift Ev by 1.
 *
 * The ModR/M reg field selects the operation; /6 is an unassigned encoding.
 * OF and AF are architecturally undefined after these operations.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,1"); break;
        /* NOTE(review): /6 is unassigned; the 0xd2 variant raises
           IEMOP_RAISE_INVALID_OPCODE here.  Both end in #UD, but the
           lock-prefix macro looks like a copy-paste slip - confirm. */
        case 6: return IEMOP_RAISE_INVALID_LOCK_PREFIX();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes clear the high half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11043
11044
/**
 * Opcode 0xd2 - Group 2: rotate/shift Eb by CL.
 *
 * The ModR/M reg field selects the operation; /6 is an unassigned encoding
 * and raises \#UD.  OF and AF are architecturally undefined afterwards.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG(uint32_t *,  pEFlags,    2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* the shift count comes from CL */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* the shift count comes from CL */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11102
11103
/** Opcode 0xd3.
 * Group 2 word/dword/qword rotate/shift of Ev with the count taken from CL.
 * The ModR/M reg field selects the operation; /6 is an undefined encoding.
 * Dispatches on the effective operand size for both register and memory
 * destination forms. */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is not a valid encoding. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF end up undefined for (some) shift counts; tell the verifier. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes clear the upper half of the 64-bit GPR. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11239
/** Opcode 0xd4.
 * AAM Ib - ASCII adjust AX after multiply.  Invalid in 64-bit mode; an
 * immediate divisor of zero raises \#DE before deferring to the C impl. */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC("aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR(); /* AAM 0 divides by zero -> #DE. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
11251
11252
/** Opcode 0xd5.
 * AAD Ib - ASCII adjust AX before division.  Invalid in 64-bit mode.
 * Unlike AAM, a zero immediate is legal (it multiplies, no division). */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC("aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
11262
11263
/** Opcode 0xd7.
 * XLAT/XLATB - AL = [eff-seg : rBX + zero-extended AL], dispatched on the
 * effective address size (the address register width is what varies). */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC("xlat");
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pIemCpu->iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pIemCpu->iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11310
11311
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * Raises \#NM / pending FPU exceptions first; if either register is tagged
 * empty the stack-underflow path is taken instead of calling the assembly
 * worker.
 *
 * @param bRm The ModR/M byte; rm selects STn.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0); /* result register is ST0 */
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11342
11343
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags.
 *
 * The assembly worker returns only an FSW value which is merged into the FPU
 * status word; no data register is written.
 *
 * @param bRm The ModR/M byte; rm selects STn.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX); /* UINT8_MAX = no destination register */
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11374
11375
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping when done.
 *
 * Same as iemOpHlpFpuNoStore_st0_stN except the stack is popped after the
 * FSW update (or on the underflow path).
 *
 * @param bRm The ModR/M byte; rm selects STn.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX); /* UINT8_MAX = no destination register */
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11406
11407
/** Opcode 0xd8 11/0.
 * FADD ST0,STn - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
11414
11415
/** Opcode 0xd8 11/1.
 * FMUL ST0,STn - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
11422
11423
/** Opcode 0xd8 11/2.
 * FCOM ST0,STn - compare, updating only the status word. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
11430
11431
/** Opcode 0xd8 11/3.
 * FCOMP ST0,STn - compare, update status word, then pop. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
11438
11439
/** Opcode 0xd8 11/4.
 * FSUB ST0,STn - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
11446
11447
/** Opcode 0xd8 11/5.
 * FSUBR ST0,STn - reversed-operand subtract, result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
11454
11455
/** Opcode 0xd8 11/6.
 * FDIV ST0,STn - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
11462
11463
/** Opcode 0xd8 11/7.
 * FDIVR ST0,STn - reversed-operand divide, result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
11470
11471
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * The 32-bit real operand is fetched from memory before the ST0 empty check;
 * an empty ST0 takes the stack-underflow path.
 *
 * @param bRm The ModR/M byte; used to compute the effective address.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0); /* result register is ST0 */
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11507
11508
/** Opcode 0xd8 !11/0.
 * FADD ST0,m32real - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
11515
11516
/** Opcode 0xd8 !11/1.
 * FMUL ST0,m32real - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
11523
11524
/** Opcode 0xd8 !11/2.
 * FCOM ST0,m32real - compare against a 32-bit real from memory, updating
 * only the status word (no data register written, no pop). */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11557
11558
/** Opcode 0xd8 !11/3.
 * FCOMP ST0,m32real - like FCOM m32r but pops the stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11591
11592
/** Opcode 0xd8 !11/4.
 * FSUB ST0,m32real - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
11599
11600
/** Opcode 0xd8 !11/5.
 * FSUBR ST0,m32real - reversed-operand subtract, result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
11607
11608
/** Opcode 0xd8 !11/6.
 * FDIV ST0,m32real - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
11615
11616
/** Opcode 0xd8 !11/7.
 * FDIVR ST0,m32real - reversed-operand divide, result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
11623
11624
/** Opcode 0xd8.
 * FPU escape 0: dispatches on ModR/M mod (register STn forms vs m32real
 * memory forms) and the reg field.  Records the FPU opcode offset first so
 * FOP can be updated by the handlers. */
FNIEMOP_DEF(iemOp_EscF0)
{
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms: ST0 with STn. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms: ST0 with a 32-bit real operand. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11662
11663
/** Opcode 0xd9 /0 mem32real
 * FLD m32real - convert the 32-bit real to 80-bit and push it.  If the
 * incoming top-1 slot (register 7 relative to TOP) is occupied, the
 * push-overflow path is taken instead.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11696
11697
/** Opcode 0xd9 !11/2 mem32real
 * FST m32real - store ST0 to memory as a 32-bit real.  On an empty ST0 a
 * negative QNaN is written when FCW.IM is set, then the underflow is
 * recorded; the mapping is only committed on that masked path. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11732
11733
/** Opcode 0xd9 !11/3
 * FSTP m32real - same as FST m32real but pops the register stack after the
 * FSW update (including on the underflow path). */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11768
11769
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte - load the FPU environment; the image size depends on
 * the effective operand size, so that is passed on to the C implementation. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pIemCpu->iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
11785
11786
11787/** Opcode 0xd9 !11/5 */
11788FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
11789{
11790 IEMOP_MNEMONIC("fldcw m2byte");
11791 IEM_MC_BEGIN(1, 1);
11792 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11793 IEM_MC_ARG(uint16_t, u16Fsw, 0);
11794 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11796 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11797 IEM_MC_FETCH_MEM_U16(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
11798 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
11799 IEM_MC_END();
11800 return VINF_SUCCESS;
11801}
11802
11803
/** Opcode 0xd9 !11/6
 * FNSTENV m14/28byte - store the FPU environment (no-wait form; the 9B
 * prefixed FSTENV decodes to the same handler after WAIT handling).
 * NOTE(review): the mnemonic string says "fstenv" while the handler is the
 * no-wait form - confirm whether it should read "fnstenv". */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pIemCpu->iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
11819
11820
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte - store the FPU control word to memory. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
11837
11838
/** Opcode 0xd9 0xc9, 0xd9 0xd8-0xdf, ++?.
 * FNOP - FPU no-operation; still raises \#NM / pending FPU exceptions and
 * updates the FPU instruction pointer. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC("fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     * intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
11856
11857
/** Opcode 0xd9 11/0 stN
 * FLD STn - push a copy of STn onto the FPU register stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(); /* source register is empty */
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
11883
11884
/** Opcode 0xd9 11/3 stN
 * FXCH STn - exchange ST0 and STn.  The empty-register case is handed to a
 * C implementation (iemCImpl_fxch_underflow). */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Store STn's old value into ST0 (via FpuRes) and ST0's into STn. */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
11913
11914
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP STn - copy ST0 to STn and pop.  The iDstReg == 0 special case is
 * handled without copying (it degenerates to a pop). */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11957
11958
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * Raises \#NM / pending FPU exceptions first; an empty ST0 takes the
 * stack-underflow path instead of calling the assembly worker.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0); /* result register is ST0 */
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11988
11989
/** Opcode 0xd9 0xe0.
 * FCHS - change the sign of ST0 in place. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC("fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
11996
11997
11998/** Opcode 0xd9 0xe1. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC("fabs st0");
    /* FABS: unary op replacing ST(0) with the assembly worker's result. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
12004
12005
12006/**
12007 * Common worker for FPU instructions working on ST0 and only returns FSW.
12008 *
12009 * @param pfnAImpl Pointer to the instruction implementation (assembly).
12010 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* No value is stored; only the FSW returned by the worker is merged in. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX); /* UINT8_MAX = no destination register. */
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12034
12035
12036/** Opcode 0xd9 0xe4. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC("ftst st0");
    /* FTST: compares ST(0) against 0.0, only FSW condition codes change. */
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
12042
12043
12044/** Opcode 0xd9 0xe5. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC("fxam st0");
    /* FXAM: classifies ST(0) into the FSW C3..C0 bits.
       NOTE(review): the shared worker raises stack underflow when ST(0) is
       empty, whereas the SDM documents FXAM as classifying an empty register
       (C3,C2,C0 = 1,0,1) without faulting — verify against hardware. */
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
12050
12051
12052/**
12053 * Common worker for FPU instructions pushing a constant onto the FPU stack.
12054 *
12055 * @param pfnAImpl Pointer to the instruction implementation (assembly).
12056 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* ST(7) must be free for the push; otherwise it's a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12079
12080
12081/** Opcode 0xd9 0xe8. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC("fld1");
    /* FLD1: push the constant +1.0 onto the FPU stack. */
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
12087
12088
12089/** Opcode 0xd9 0xe9. */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC("fldl2t");
    /* FLDL2T: push the constant log2(10). */
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
12095
12096
12097/** Opcode 0xd9 0xea. */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC("fldl2e");
    /* FLDL2E: push the constant log2(e). */
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
12103
12104/** Opcode 0xd9 0xeb. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC("fldpi");
    /* FLDPI: push the constant pi. */
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
12110
12111
12112/** Opcode 0xd9 0xec. */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC("fldlg2");
    /* FLDLG2: push the constant log10(2). */
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
12118
12119/** Opcode 0xd9 0xed. */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC("fldln2");
    /* FLDLN2: push the constant ln(2). */
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
12125
12126
12127/** Opcode 0xd9 0xee. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC("fldz");
    /* FLDZ: push the constant +0.0. */
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
12133
12134
12135/** Opcode 0xd9 0xf0. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC("f2xm1 st0");
    /* F2XM1: unary op replacing ST(0) (computes 2^x - 1 in the worker). */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
12141
12142
12143/** Opcode 0xd9 0xf1. */
12144FNIEMOP_DEF(iemOp_fylx2)
12145{
12146 IEMOP_MNEMONIC("fylx2 st0");
12147 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fyl2x_r80);
12148}
12149
12150
12151/**
12152 * Common worker for FPU instructions working on ST0 and having two outputs, one
12153 * replacing ST0 and one pushed onto the stack.
12154 *
12155 * @param pfnAImpl Pointer to the instruction implementation (assembly).
12156 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Worker produces two results: one replacing ST(0), one pushed on top. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12180
12181
12182/** Opcode 0xd9 0xf2. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC("fptan st0");
    /* FPTAN: two-output worker — replaces ST(0) and pushes a second value. */
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
12188
12189
12190/**
12191 * Common worker for FPU instructions working on STn and ST0, storing the result
12192 * in STn, and popping the stack unless IE, DE or ZE was raised.
12193 *
12194 * @param pfnAImpl Pointer to the instruction implementation (assembly).
12195 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Operands: value1 = ST(i) from the low ModRM bits, value2 = ST(0);
       result goes to ST(i) and the stack is popped. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12221
12222
12223/** Opcode 0xd9 0xf3. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC("fpatan st1,st0");
    /* FPATAN: result into ST(1), then pop (stN/st0 worker with i=1). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
12229
12230
12231/** Opcode 0xd9 0xf4. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC("fxtract st0");
    /* FXTRACT: two-output worker — replaces ST(0) and pushes a second value. */
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
12237
12238
12239/** Opcode 0xd9 0xf5. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC("fprem1 st0, st1");
    /* FPREM1: dispatches to the ST(0)-by-ST(1) worker, result in ST(0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
12245
12246
12247/** Opcode 0xd9 0xf6. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC("fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Only the TOP field moves; no register contents or tags are touched. */
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12269
12270
12271/** Opcode 0xd9 0xf7. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC("fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Only the TOP field moves; no register contents or tags are touched. */
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12293
12294
12295/** Opcode 0xd9 0xf8. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC("fprem st0, st1");
    /* FPREM: dispatches to the ST(0)-by-ST(1) worker, result in ST(0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
12301
12302
12303/** Opcode 0xd9 0xf9. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC("fyl2xp1 st1,st0");
    /* FYL2XP1: result into ST(1), then pop (stN/st0 worker with i=1). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
12309
12310
12311/** Opcode 0xd9 0xfa. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC("fsqrt st0");
    /* FSQRT: unary op replacing ST(0). */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
12317
12318
12319/** Opcode 0xd9 0xfb. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC("fsincos st0");
    /* FSINCOS: two-output worker — replaces ST(0) and pushes a second value. */
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
12325
12326
12327/** Opcode 0xd9 0xfc. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC("frndint st0");
    /* FRNDINT: unary op replacing ST(0). */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
12333
12334
12335/** Opcode 0xd9 0xfd. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC("fscale st0, st1");
    /* FSCALE: dispatches to the ST(0)-by-ST(1) worker, result in ST(0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
12341
12342
12343/** Opcode 0xd9 0xfe. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC("fsin st0");
    /* FSIN: unary op replacing ST(0). */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
12349
12350
12351/** Opcode 0xd9 0xff. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC("fcos st0");
    /* FCOS: unary op replacing ST(0). */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
12357
12358
/** Dispatch table used by iemOp_EscF1 for 0xd9 with mod=3 and reg 4..7,
 *  i.e. second opcode bytes 0xe0 through 0xff; indexed by (byte - 0xe0). */
static const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fylx2,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
12395
12396
12397/** Opcode 0xd9. */
FNIEMOP_DEF(iemOp_EscF1)
{
    /* Record the position of the 0xd9 byte for FPU FOP/FPUIP bookkeeping. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms, selected by the reg field (and full byte for 4..7). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xc9)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms, selected by the reg field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r,  bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv,    bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw,     bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv,    bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw,     bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12438
12439
12440/** Opcode 0xda 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST(0) must be occupied; copy ST(i) to ST(0) if CF=1. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12466
12467
12468/** Opcode 0xda 11/1. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST(0) must be occupied; copy ST(i) to ST(0) if ZF=1. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12494
12495
12496/** Opcode 0xda 11/2. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST(0) must be occupied; copy if CF=1 or ZF=1 (below-or-equal). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12522
12523
12524/** Opcode 0xda 11/3. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST(0) must be occupied; copy if PF=1 (unordered). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12550
12551
12552/**
12553 * Common worker for FPU instructions working on ST0 and STn, only affecting
12554 * flags, and popping twice when done.
12555 *
12556 * @param pfnAImpl Pointer to the instruction implementation (assembly).
12557 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Compares ST(0) with ST(1); only the FSW is updated, then pop twice. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12582
12583
12584/** Opcode 0xda 0xe9. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC("fucompp st0,stN");
    /* FUCOMPP: unordered compare of ST(0) with ST(1), then pop twice. */
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
12590
12591
12592/**
12593 * Common worker for FPU instructions working on ST0 and an m32i, and storing
12594 * the result in ST0.
12595 *
12596 * @param pfnAImpl Pointer to the instruction implementation (assembly).
12597 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    /* Decode the memory operand before finishing instruction decoding. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* ST(0) op m32int, result replaces ST(0); empty ST(0) -> underflow. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12627
12628
12629/** Opcode 0xda !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m32i");
    /* FIADD: ST(0) += m32int via the common st0/m32i worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
12635
12636
12637/** Opcode 0xda !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m32i");
    /* FIMUL: ST(0) *= m32int via the common st0/m32i worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
12643
12644
12645/** Opcode 0xda !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Compare ST(0) with m32int; only FSW (and FPU data pointer) change. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12677
12678
12679/** Opcode 0xda !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Same as FICOM but the stack is popped after the FSW update. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12711
12712
12713/** Opcode 0xda !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m32i");
    /* FISUB: ST(0) -= m32int via the common st0/m32i worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
12719
12720
12721/** Opcode 0xda !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m32i");
    /* FISUBR: reversed subtract via the common st0/m32i worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
12727
12728
12729/** Opcode 0xda !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidiv m32i");
    /* FIDIV: ST(0) /= m32int via the common st0/m32i worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
12735
12736
12737/** Opcode 0xda !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidivr m32i");
    /* FIDIVR: reversed divide via the common st0/m32i worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
12743
12744
12745/** Opcode 0xda. */
FNIEMOP_DEF(iemOp_EscF2)
{
    /* Record the position of the 0xda byte for FPU FOP/FPUIP bookkeeping. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms: FCMOVcc, plus FUCOMPP at the single byte 0xe9. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms: 32-bit integer arithmetic/compares on ST(0). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12784
12785
12786/** Opcode 0xdb !11/0. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Convert the m32int and push it; ST(7) must be free or it's overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12817
12818
12819/** Opcode 0xdb !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* FISTT (truncating convert) to memory, then pop.  The commit is
       conditional on the FSW the worker returns. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): write the integer indefinite if IM is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12853
12854
12855/** Opcode 0xdb !11/2. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Rounding convert ST(0) to memory; no pop.  Commit depends on FSW. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): write the integer indefinite if IM is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12889
12890
12891/** Opcode 0xdb !11/3. */
12892FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
12893{
12894 IEMOP_MNEMONIC("fisttp m32i");
12895 IEM_MC_BEGIN(3, 2);
12896 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12897 IEM_MC_LOCAL(uint16_t, u16Fsw);
12898 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12899 IEM_MC_ARG(int32_t *, pi32Dst, 1);
12900 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
12901
12902 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12904 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12905 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12906
12907 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
12908 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
12909 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
12910 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
12911 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
12912 IEM_MC_ELSE()
12913 IEM_MC_IF_FCW_IM()
12914 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
12915 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
12916 IEM_MC_ENDIF();
12917 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
12918 IEM_MC_ENDIF();
12919 IEM_MC_USED_FPU();
12920 IEM_MC_ADVANCE_RIP();
12921
12922 IEM_MC_END();
12923 return VINF_SUCCESS;
12924}
12925
12926
12927/** Opcode 0xdb !11/5. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Push the 80-bit value; ST(7) must be free or it's a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12958
12959
12960/** Opcode 0xdb !11/7. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Store ST(0) as 80-bit real to memory, then pop.  Commit depends on FSW. */
    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): write a negative QNaN (real indefinite) if IM is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12994
12995
12996/** Opcode 0xdb 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST(0) must be occupied; copy ST(i) to ST(0) if CF=0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13022
13023
13024/** Opcode 0xdb 11/1. */
13025FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
13026{
13027 IEMOP_MNEMONIC("fcmovne st0,stN");
13028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13029
13030 IEM_MC_BEGIN(0, 1);
13031 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
13032
13033 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13034 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13035
13036 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
13037 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
13038 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
13039 IEM_MC_ENDIF();
13040 IEM_MC_UPDATE_FPU_OPCODE_IP();
13041 IEM_MC_ELSE()
13042 IEM_MC_FPU_STACK_UNDERFLOW(0);
13043 IEM_MC_ENDIF();
13044 IEM_MC_USED_FPU();
13045 IEM_MC_ADVANCE_RIP();
13046
13047 IEM_MC_END();
13048 return VINF_SUCCESS;
13049}
13050
13051
13052/** Opcode 0xdb 11/2. */
13053FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
13054{
13055 IEMOP_MNEMONIC("fcmovnbe st0,stN");
13056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13057
13058 IEM_MC_BEGIN(0, 1);
13059 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
13060
13061 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13062 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13063
13064 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
13065 IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
13066 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
13067 IEM_MC_ENDIF();
13068 IEM_MC_UPDATE_FPU_OPCODE_IP();
13069 IEM_MC_ELSE()
13070 IEM_MC_FPU_STACK_UNDERFLOW(0);
13071 IEM_MC_ENDIF();
13072 IEM_MC_USED_FPU();
13073 IEM_MC_ADVANCE_RIP();
13074
13075 IEM_MC_END();
13076 return VINF_SUCCESS;
13077}
13078
13079
13080/** Opcode 0xdb 11/3. */
13081FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
13082{
13083 IEMOP_MNEMONIC("fcmovnnu st0,stN");
13084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13085
13086 IEM_MC_BEGIN(0, 1);
13087 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
13088
13089 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13090 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13091
13092 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
13093 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
13094 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
13095 IEM_MC_ENDIF();
13096 IEM_MC_UPDATE_FPU_OPCODE_IP();
13097 IEM_MC_ELSE()
13098 IEM_MC_FPU_STACK_UNDERFLOW(0);
13099 IEM_MC_ENDIF();
13100 IEM_MC_USED_FPU();
13101 IEM_MC_ADVANCE_RIP();
13102
13103 IEM_MC_END();
13104 return VINF_SUCCESS;
13105}
13106
13107
/** Opcode 0xdb 0xe0.
 * FNENI - 8087 interrupt-enable relic; a no-op (ignored) on later FPUs. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC("fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 0xe1.
 * FNDISI - 8087 interrupt-disable relic; a no-op (ignored) on later FPUs. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC("fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 0xe2.
 * FNCLEX - clear the FPU exception flags without checking for pending
 * unmasked exceptions first. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC("fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 0xe3.
 * FNINIT - reinitialize the FPU; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC("fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}


/** Opcode 0xdb 0xe4.
 * FNSETPM - 80287 relic; a no-op (ignored) on later FPUs. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC("fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 0xe5.
 * FRSTPM - 80287XL relic; raises \#UD here like newer CPUs do. */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC("frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
13186
13187
/** Opcode 0xdb 11/5.
 * FUCOMI - unordered compare ST(0) with ST(i), setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}


/** Opcode 0xdb 11/6.
 * FCOMI - ordered compare ST(0) with ST(i), setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
13202
13203
13204/** Opcode 0xdb. */
13205FNIEMOP_DEF(iemOp_EscF3)
13206{
13207 pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
13208 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13209 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13210 {
13211 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13212 {
13213 case 0: FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
13214 case 1: FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
13215 case 2: FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
13216 case 3: FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
13217 case 4:
13218 switch (bRm)
13219 {
13220 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
13221 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
13222 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
13223 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
13224 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
13225 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
13226 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
13227 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
13228 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13229 }
13230 break;
13231 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
13232 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
13233 case 7: return IEMOP_RAISE_INVALID_OPCODE();
13234 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13235 }
13236 }
13237 else
13238 {
13239 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13240 {
13241 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
13242 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
13243 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
13244 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
13245 case 4: return IEMOP_RAISE_INVALID_OPCODE();
13246 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
13247 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13248 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
13249 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13250 }
13251 }
13252}
13253
13254
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * @param bRm       The ModR/M byte; the low bits select ST(i).
 * @param pfnAImpl  Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Operand 1 is ST(i), operand 2 is ST(0); the result lands in ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13286
13287
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
13334
13335
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * @param bRm      The ModR/M byte (memory form).
 * @param pfnImpl  Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Fetch the memory operand first so a #PF is raised before FPU state changes. */
    IEM_MC_FETCH_MEM_R64(r64Factor2, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13370
13371
/** Opcode 0xdc !11/0.
 * FADD ST(0),m64real. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}


/** Opcode 0xdc !11/1.
 * FMUL ST(0),m64real. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
13386
13387
/** Opcode 0xdc !11/2.
 * FCOM ST(0),m64real - compare without modifying the stack. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        /* Compare only; the assembly worker reports the outcome via FSW. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13420
13421
/** Opcode 0xdc !11/3.
 * FCOMP ST(0),m64real - like FCOM but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        /* _THEN_POP variants pop ST(0) after updating FSW (the only difference
           from iemOp_fcom_m64r). */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13454
13455
/** Opcode 0xdc !11/4.
 * FSUB ST(0),m64real. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}


/** Opcode 0xdc !11/5.
 * FSUBR ST(0),m64real. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}


/** Opcode 0xdc !11/6.
 * FDIV ST(0),m64real. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}


/** Opcode 0xdc !11/7.
 * FDIVR ST(0),m64real. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
13486
13487
/** Opcode 0xdc.
 * Escape-F4 decoder: register forms operate on ST(i),ST(0); memory forms
 * take a 64-bit real operand. */
FNIEMOP_DEF(iemOp_EscF4)
{
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13524
13525
/** Opcode 0xdd !11/0.
 * FLD m64real - push a 64-bit real from memory onto the FPU stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pIemCpu->iEffSeg, GCPtrEffSrc);
    /* ST(7) (relative) is the slot the push will occupy; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13557
13558
/** Opcode 0xdd !11/1.
 * FISTTP m64int - store ST(0) as a truncated 64-bit integer and pop.
 * (Header previously mislabeled as !11/0.) */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: write the integer-indefinite value only when the
           invalid-operation exception is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13593
13594
/** Opcode 0xdd !11/2.
 * FST m64real - store ST(0) to a 64-bit real in memory; no pop.
 * (Header previously mislabeled as !11/0.) */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        /* No _THEN_POP here: FST leaves the stack depth unchanged. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13629
13630
13631
13632
/** Opcode 0xdd !11/3.
 * FSTP m64real - store ST(0) to a 64-bit real in memory and pop.
 * (Header previously mislabeled as !11/0.) */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13667
13668
13669/** Opcode 0xdd !11/0. */
13670FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
13671{
13672 IEMOP_MNEMONIC("fxrstor m94/108byte");
13673 IEM_MC_BEGIN(3, 0);
13674 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
13675 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pIemCpu->iEffSeg, 1);
13676 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
13677 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13679 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13680 IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
13681 IEM_MC_END();
13682 return VINF_SUCCESS;
13683}
13684
13685
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte - save the full FPU state to memory without checking
 * for pending exceptions first. (Header previously mislabeled as !11/0.) */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pIemCpu->iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
13702
/** Opcode 0xdd !11/7.
 * FNSTSW m16 - store the FPU status word to memory.
 * (Header previously mislabeled as !11/0.) */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13726
13727
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - mark the given stack register as empty in the tag word. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13749
13750
/** Opcode 0xdd 11/2.
 * FST ST(i) - copy ST(0) into ST(i); no pop.
 * (Dispatched from case 2 of iemOp_EscF5.) */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Wrap ST(0) in a result with a zero FSW delta and store it in ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13773
13774
13775/** Opcode 0xdd 11/3. */
13776FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
13777{
13778 IEMOP_MNEMONIC("fcom st0,stN");
13779 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
13780}
13781
13782
13783/** Opcode 0xdd 11/4. */
13784FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
13785{
13786 IEMOP_MNEMONIC("fcomp st0,stN");
13787 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
13788}
13789
13790
/** Opcode 0xdd.
 * Escape-F5 decoder: register forms (FFREE/FST/FSTP/FUCOM/FUCOMP) and
 * 64-bit real / FPU-state memory forms. */
FNIEMOP_DEF(iemOp_EscF5)
{
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13827
13828
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0) - add and pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0) - multiply and pop. (Header previously said 11/0.) */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
13843
13844
13845/** Opcode 0xde 0xd9. */
13846FNIEMOP_DEF(iemOp_fcompp)
13847{
13848 IEMOP_MNEMONIC("fucompp st0,stN");
13849 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
13850}
13851
13852
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0) - reverse subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0) - subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0) - reverse divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0) - divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
13883
13884
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param bRm       The ModR/M byte (memory form).
 * @param pfnAImpl  Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the 16-bit integer operand before touching the FPU stack. */
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13920
13921
/** Opcode 0xde !11/0.
 * FIADD ST(0),m16int. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}


/** Opcode 0xde !11/1.
 * FIMUL ST(0),m16int. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
13936
13937
/** Opcode 0xde !11/2.
 * FICOM ST(0),m16int - compare with a 16-bit integer; no pop. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13970
13971
/** Opcode 0xde !11/3.
 * FICOMP m16i: like FICOM m16i (compare ST(0) with a signed 16-bit memory
 * integer, set C0/C2/C3 in FSW), but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Same worker as FICOM; the difference is the _THEN_POP FSW/underflow updates. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14004
14005
/** Opcode 0xde !11/4.
 * FISUB m16i: ST(0) = ST(0) - (signed 16-bit integer at [mem]). */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m16i");
    /* The shared m16i worker does the decoding, fetch, FPU call and FSW update. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
14012
14013
/** Opcode 0xde !11/5.
 * FISUBR m16i: ST(0) = (signed 16-bit integer at [mem]) - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m16i");
    /* The shared m16i worker does the decoding, fetch, FPU call and FSW update. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
14020
14021
14022/** Opcode 0xde !11/6. */
14023FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
14024{
14025 IEMOP_MNEMONIC("fiadd m16i");
14026 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
14027}
14028
14029
14030/** Opcode 0xde !11/7. */
14031FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
14032{
14033 IEMOP_MNEMONIC("fiadd m16i");
14034 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
14035}
14036
14037
/** Opcode 0xde.
 * FPU escape 0xde: register forms are the popping two-operand arithmetic
 * instructions (faddp/fmulp/fcompp/fsubrp/fsubp/fdivrp/fdivp), memory forms
 * are the 16-bit integer arithmetic instructions (fiadd..fidivr m16i). */
FNIEMOP_DEF(iemOp_EscF6)
{
    /* Remember where the FPU opcode byte is, for the FOP register image. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register operand: dispatch on the reg field of ModR/M. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9) /* Only DE D9 encodes FCOMPP; the rest of /3 is invalid. */
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory operand: the m16i integer arithmetic group. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14076
14077
/** Opcode 0xdf 11/0.
 * FFREEP ST(i) - undocumented instruction, assumed to work like
 * FFREE ST(i) followed by FINCSTP (i.e. free the register, then pop). */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Mark ST(i) empty, then advance TOP - the "pop" half of the instruction. */
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14099
14100
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: copy the FPU status word into AX.  No-wait form, so no
 * pending-FPU-exception check is done here (only CR0.EM/TS). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC("fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14116
14117
14118/** Opcode 0xdf 11/5. */
14119FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
14120{
14121 IEMOP_MNEMONIC("fcomip st0,stN");
14122 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
14123}
14124
14125
/** Opcode 0xdf 11/6.
 * FCOMIP ST(0),ST(i): ordered compare into EFLAGS (ZF/PF/CF), then pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomip st0,stN");
    /* Deferred to a C implementation; fPop=true distinguishes FCOMIP from FCOMI. */
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
14132
14133
/** Opcode 0xdf !11/0.
 * FILD m16i - not implemented yet, generated stub. */
FNIEMOP_STUB_1(iemOp_fild_m16i, uint8_t, bRm);
14136
14137
/** Opcode 0xdf !11/1.
 * FISTTP m16i (SSE3): store ST(0) to [mem] as int16 using truncation
 * (chop) rounding regardless of FCW.RC, then pop the stack. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; committed only if the store succeeds. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14172
14173
14174/** Opcode 0xdf !11/2. */
14175FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
14176{
14177 IEMOP_MNEMONIC("fistp m16i");
14178 IEM_MC_BEGIN(3, 2);
14179 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14180 IEM_MC_LOCAL(uint16_t, u16Fsw);
14181 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14182 IEM_MC_ARG(int16_t *, pi16Dst, 1);
14183 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14184
14185 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14187 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14188 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14189
14190 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
14191 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14192 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
14193 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
14194 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
14195 IEM_MC_ELSE()
14196 IEM_MC_IF_FCW_IM()
14197 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
14198 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
14199 IEM_MC_ENDIF();
14200 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
14201 IEM_MC_ENDIF();
14202 IEM_MC_USED_FPU();
14203 IEM_MC_ADVANCE_RIP();
14204
14205 IEM_MC_END();
14206 return VINF_SUCCESS;
14207}
14208
14209
/** Opcode 0xdf !11/3.
 * FISTP m16i: store ST(0) to [mem] as int16 using the FCW rounding mode,
 * then pop the stack. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; committed only if the store succeeds. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14244
14245
/** Opcode 0xdf !11/4.
 * FBLD m80bcd - not implemented yet, generated stub. */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
14248
/** Opcode 0xdf !11/5.
 * FILD m64i - not implemented yet, generated stub. */
FNIEMOP_STUB_1(iemOp_fild_m64i, uint8_t, bRm);
14251
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd - not implemented yet, generated stub. */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
14254
14255
/** Opcode 0xdf !11/7.
 * FISTP m64i: store ST(0) to [mem] as int64 using the FCW rounding mode,
 * then pop the stack.  Same structure as the m16i variant above. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; committed only if the store succeeds. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14290
14291
/** Opcode 0xdf.
 * FPU escape 0xdf: register forms include ffreep, fnstsw ax and the
 * fucomip/fcomip EFLAGS compares; memory forms are the 16/64-bit integer
 * loads/stores and the BCD load/store. */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register operand: dispatch on the reg field of ModR/M. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* Only DF E0 encodes FNSTSW AX; the rest of /4 is invalid. */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory operand: integer/BCD load and store group. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14329
14330
/** Opcode 0xe0.
 * LOOPNE/LOOPNZ Jb: decrement (r/e)CX and jump if the counter is non-zero
 * AND ZF is clear.  The counter width follows the effective address size. */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC("loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14377
14378
/** Opcode 0xe1.
 * LOOPE/LOOPZ Jb: decrement (r/e)CX and jump if the counter is non-zero
 * AND ZF is set.  The counter width follows the effective address size. */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC("loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14425
14426
/** Opcode 0xe2.
 * LOOP Jb: decrement (r/e)CX and jump if the counter is non-zero.
 * The counter width follows the effective address size. */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC("loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14476
14477
/** Opcode 0xe3.
 * JCXZ/JECXZ/JRCXZ Jb: jump if the counter register is zero; which
 * counter (CX/ECX/RCX) is selected by the effective address size.
 * Note the inverted branch shape: the NZ case falls through. */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC("jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14521
14522
14523/** Opcode 0xe4 */
14524FNIEMOP_DEF(iemOp_in_AL_Ib)
14525{
14526 IEMOP_MNEMONIC("in eAX,Ib");
14527 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
14528 IEMOP_HLP_NO_LOCK_PREFIX();
14529 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
14530}
14531
14532
/** Opcode 0xe5.
 * IN eAX,Ib: read a word/dword from the immediate-specified I/O port. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC("in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Access size follows the operand size: 2 bytes for 16-bit, else 4. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
14541
14542
/** Opcode 0xe6.
 * OUT Ib,AL: write AL to the immediate-specified I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC("out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Deferred to the C implementation; 1 = access size in bytes. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
14551
14552
/** Opcode 0xe7.
 * OUT Ib,eAX: write AX/EAX to the immediate-specified I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC("out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Access size follows the operand size: 2 bytes for 16-bit, else 4. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
14561
14562
/** Opcode 0xe8.
 * CALL Jv: near relative call.  Immediate width follows the effective
 * operand size; in 64-bit mode the displacement is a sign-extended imm32. */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC("call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            /* Cast restores the signed displacement for the C implementation. */
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14591
14592
/** Opcode 0xe9.
 * JMP Jv: near relative jump.  64-bit mode shares the 32-bit path since
 * the displacement is an imm32 there as well (sign-extended by REL_JMP). */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC("jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14622
14623
/** Opcode 0xea.
 * JMP Ap: direct far jump (ptr16:16 / ptr16:32).  Invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC("jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pIemCpu->enmEffOpSize);
}
14640
14641
/** Opcode 0xeb.
 * JMP Jb: short relative jump (rel8). */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC("jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
14655
14656
/** Opcode 0xec.
 * IN AL,DX: read one byte from the I/O port in DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC("in AL,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Deferred to the C implementation; 1 = access size in bytes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
14664
14665
/** Opcode 0xed.
 * IN eAX,DX: read a word/dword from the I/O port in DX.
 * NOTE(review): the function name is missing the "in_" prefix used by the
 * sibling handlers (cf. iemOp_in_AL_DX); renaming would require updating
 * the opcode dispatch table elsewhere in this file. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC("in eAX,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Access size follows the operand size: 2 bytes for 16-bit, else 4. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
14673
14674
/** Opcode 0xee.
 * OUT DX,AL: write AL to the I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC("out DX,AL");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Deferred to the C implementation; 1 = access size in bytes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
14682
14683
/** Opcode 0xef.
 * OUT DX,eAX: write AX/EAX to the I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC("out DX,eAX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Access size follows the operand size: 2 bytes for 16-bit, else 4. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
14691
14692
/** Opcode 0xf0.
 * LOCK prefix: record the prefix flag and restart decoding at the next
 * opcode byte via the one-byte dispatch table. */
FNIEMOP_DEF(iemOp_lock)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
14701
14702
/** Opcode 0xf2.
 * REPNE/REPNZ prefix: record the prefix flag (replacing any earlier REPE)
 * and restart decoding at the next opcode byte. */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPZ;
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPNZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
14713
14714
/** Opcode 0xf3.
 * REP/REPE/REPZ prefix: record the prefix flag (replacing any earlier
 * REPNE) and restart decoding at the next opcode byte. */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPNZ;
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
14725
14726
14727/** Opcode 0xf4. */
14728FNIEMOP_DEF(iemOp_hlt)
14729{
14730 IEMOP_HLP_NO_LOCK_PREFIX();
14731 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
14732}
14733
14734
/** Opcode 0xf5.
 * CMC: complement the carry flag; no other flags are affected. */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC("cmc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14746
14747
14748/**
14749 * Common implementation of 'inc/dec/not/neg Eb'.
14750 *
14751 * @param bRm The RM byte.
14752 * @param pImpl The instruction implementation.
14753 */
14754FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
14755{
14756 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14757 {
14758 /* register access */
14759 IEM_MC_BEGIN(2, 0);
14760 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
14761 IEM_MC_ARG(uint32_t *, pEFlags, 1);
14762 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
14763 IEM_MC_REF_EFLAGS(pEFlags);
14764 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
14765 IEM_MC_ADVANCE_RIP();
14766 IEM_MC_END();
14767 }
14768 else
14769 {
14770 /* memory access. */
14771 IEM_MC_BEGIN(2, 2);
14772 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
14773 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
14774 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14775
14776 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14777 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
14778 IEM_MC_FETCH_EFLAGS(EFlags);
14779 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
14780 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
14781 else
14782 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
14783
14784 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
14785 IEM_MC_COMMIT_EFLAGS(EFlags);
14786 IEM_MC_ADVANCE_RIP();
14787 IEM_MC_END();
14788 }
14789 return VINF_SUCCESS;
14790}
14791
14792
14793/**
14794 * Common implementation of 'inc/dec/not/neg Ev'.
14795 *
14796 * @param bRm The RM byte.
14797 * @param pImpl The instruction implementation.
14798 */
14799FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
14800{
14801 /* Registers are handled by a common worker. */
14802 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14803 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
14804
14805 /* Memory we do here. */
14806 switch (pIemCpu->enmEffOpSize)
14807 {
14808 case IEMMODE_16BIT:
14809 IEM_MC_BEGIN(2, 2);
14810 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
14811 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
14812 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14813
14814 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14815 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
14816 IEM_MC_FETCH_EFLAGS(EFlags);
14817 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
14818 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
14819 else
14820 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
14821
14822 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
14823 IEM_MC_COMMIT_EFLAGS(EFlags);
14824 IEM_MC_ADVANCE_RIP();
14825 IEM_MC_END();
14826 return VINF_SUCCESS;
14827
14828 case IEMMODE_32BIT:
14829 IEM_MC_BEGIN(2, 2);
14830 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
14831 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
14832 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14833
14834 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14835 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
14836 IEM_MC_FETCH_EFLAGS(EFlags);
14837 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
14838 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
14839 else
14840 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
14841
14842 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
14843 IEM_MC_COMMIT_EFLAGS(EFlags);
14844 IEM_MC_ADVANCE_RIP();
14845 IEM_MC_END();
14846 return VINF_SUCCESS;
14847
14848 case IEMMODE_64BIT:
14849 IEM_MC_BEGIN(2, 2);
14850 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
14851 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
14852 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14853
14854 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14855 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
14856 IEM_MC_FETCH_EFLAGS(EFlags);
14857 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
14858 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
14859 else
14860 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
14861
14862 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
14863 IEM_MC_COMMIT_EFLAGS(EFlags);
14864 IEM_MC_ADVANCE_RIP();
14865 IEM_MC_END();
14866 return VINF_SUCCESS;
14867
14868 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14869 }
14870}
14871
14872
/** Opcode 0xf6 /0.
 * TEST Eb,Ib: AND Eb with an immediate byte, update flags, discard the
 * result.  The memory operand is therefore mapped read-only. */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The '1' tells the effective address calc that an imm8 follows ModR/M. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        /* TEST only reads the destination, hence the read-only mapping. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
14920
14921
14922/** Opcode 0xf7 /0. */
14923FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
14924{
14925 IEMOP_MNEMONIC("test Ev,Iv");
14926 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
14927 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
14928
14929 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14930 {
14931 /* register access */
14932 switch (pIemCpu->enmEffOpSize)
14933 {
14934 case IEMMODE_16BIT:
14935 {
14936 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
14937 IEM_MC_BEGIN(3, 0);
14938 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
14939 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
14940 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14941 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
14942 IEM_MC_REF_EFLAGS(pEFlags);
14943 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
14944 IEM_MC_ADVANCE_RIP();
14945 IEM_MC_END();
14946 return VINF_SUCCESS;
14947 }
14948
14949 case IEMMODE_32BIT:
14950 {
14951 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
14952 IEM_MC_BEGIN(3, 0);
14953 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
14954 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
14955 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14956 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
14957 IEM_MC_REF_EFLAGS(pEFlags);
14958 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
14959 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
14960 IEM_MC_ADVANCE_RIP();
14961 IEM_MC_END();
14962 return VINF_SUCCESS;
14963 }
14964
14965 case IEMMODE_64BIT:
14966 {
14967 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
14968 IEM_MC_BEGIN(3, 0);
14969 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
14970 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
14971 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14972 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
14973 IEM_MC_REF_EFLAGS(pEFlags);
14974 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
14975 IEM_MC_ADVANCE_RIP();
14976 IEM_MC_END();
14977 return VINF_SUCCESS;
14978 }
14979
14980 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14981 }
14982 }
14983 else
14984 {
14985 /* memory access. */
14986 switch (pIemCpu->enmEffOpSize)
14987 {
14988 case IEMMODE_16BIT:
14989 {
14990 IEM_MC_BEGIN(3, 2);
14991 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
14992 IEM_MC_ARG(uint16_t, u16Src, 1);
14993 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
14994 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14995
14996 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
14997 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
14998 IEM_MC_ASSIGN(u16Src, u16Imm);
14999 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
15000 IEM_MC_FETCH_EFLAGS(EFlags);
15001 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
15002
15003 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
15004 IEM_MC_COMMIT_EFLAGS(EFlags);
15005 IEM_MC_ADVANCE_RIP();
15006 IEM_MC_END();
15007 return VINF_SUCCESS;
15008 }
15009
15010 case IEMMODE_32BIT:
15011 {
15012 IEM_MC_BEGIN(3, 2);
15013 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
15014 IEM_MC_ARG(uint32_t, u32Src, 1);
15015 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
15016 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15017
15018 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
15019 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
15020 IEM_MC_ASSIGN(u32Src, u32Imm);
15021 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
15022 IEM_MC_FETCH_EFLAGS(EFlags);
15023 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
15024
15025 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
15026 IEM_MC_COMMIT_EFLAGS(EFlags);
15027 IEM_MC_ADVANCE_RIP();
15028 IEM_MC_END();
15029 return VINF_SUCCESS;
15030 }
15031
15032 case IEMMODE_64BIT:
15033 {
15034 IEM_MC_BEGIN(3, 2);
15035 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
15036 IEM_MC_ARG(uint64_t, u64Src, 1);
15037 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
15038 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15039
15040 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
15041 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
15042 IEM_MC_ASSIGN(u64Src, u64Imm);
15043 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
15044 IEM_MC_FETCH_EFLAGS(EFlags);
15045 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
15046
15047 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
15048 IEM_MC_COMMIT_EFLAGS(EFlags);
15049 IEM_MC_ADVANCE_RIP();
15050 IEM_MC_END();
15051 return VINF_SUCCESS;
15052 }
15053
15054 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15055 }
15056 }
15057}
15058
15059
/** Opcode 0xf6 /4, /5, /6 and /7. */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    /*
     * Common worker for the byte-sized multiply/divide group-3 encodings
     * (MUL/IMUL/DIV/IDIV r/m8).  The assembly worker operates on AX as the
     * implicit accumulator and returns non-zero on a divide error, in which
     * case \#DE is raised instead of advancing RIP.
     */
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_NO_LOCK_PREFIX(); /* NOTE(review): redundant - already done above. */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc == 0 means success; anything else is a divide error (#DE). */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
15114
15115
/** Opcode 0xf7 /4, /5, /6 and /7. */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    /*
     * Common worker for the word/dword/qword multiply/divide group-3
     * encodings (MUL/IMUL/DIV/IDIV r/m16/32/64).  The workers operate on the
     * implicit xDX:xAX register pair and return non-zero on a divide error,
     * in which case \#DE is raised instead of advancing RIP.
     */
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* EAX/EDX were written, so the 32-bit write must
                       zero-extend into RAX/RDX on success. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15299
/** Opcode 0xf6. */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    /*
     * Group 3, byte operand: dispatch on the ModR/M reg field.
     * /0 = TEST, /1 = invalid, /2 = NOT, /3 = NEG, /4 = MUL, /5 = IMUL,
     * /6 = DIV, /7 = IDIV.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC("imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC("div Eb");
            /* All arithmetic flags are architecturally undefined after DIV/IDIV. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC("idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15335
15336
/** Opcode 0xf7. */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    /*
     * Group 3, word/dword/qword operand: dispatch on the ModR/M reg field.
     * Mirrors iemOp_Grp3_Eb but routes to the sized worker tables.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC("imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC("div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC("idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15372
15373
/** Opcode 0xf8. */
FNIEMOP_DEF(iemOp_clc)
{
    /* CLC - clear the carry flag; no other flags or state affected. */
    IEMOP_MNEMONIC("clc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15385
15386
/** Opcode 0xf9. */
FNIEMOP_DEF(iemOp_stc)
{
    /* STC - set the carry flag; no other flags or state affected. */
    IEMOP_MNEMONIC("stc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15398
15399
/** Opcode 0xfa. */
FNIEMOP_DEF(iemOp_cli)
{
    /* CLI - privilege/IOPL checks live in the C implementation, hence the deferral. */
    IEMOP_MNEMONIC("cli");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
15407
15408
/** Opcode 0xfb. */
FNIEMOP_DEF(iemOp_sti)
{
    /* STI - privilege/IOPL checks and the interrupt shadow are handled in the C implementation. */
    IEMOP_MNEMONIC("sti");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
15415
15416
/** Opcode 0xfc. */
FNIEMOP_DEF(iemOp_cld)
{
    /* CLD - clear the direction flag (string ops ascend). */
    IEMOP_MNEMONIC("cld");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15428
15429
/** Opcode 0xfd. */
FNIEMOP_DEF(iemOp_std)
{
    /* STD - set the direction flag (string ops descend). */
    IEMOP_MNEMONIC("std");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15441
15442
15443/** Opcode 0xfe. */
15444FNIEMOP_DEF(iemOp_Grp4)
15445{
15446 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15447 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15448 {
15449 case 0:
15450 IEMOP_MNEMONIC("inc Ev");
15451 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
15452 case 1:
15453 IEMOP_MNEMONIC("dec Ev");
15454 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
15455 default:
15456 IEMOP_MNEMONIC("grp4-ud");
15457 return IEMOP_RAISE_INVALID_OPCODE();
15458 }
15459}
15460
15461
15462/**
15463 * Opcode 0xff /2.
15464 * @param bRm The RM byte.
15465 */
15466FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
15467{
15468 IEMOP_MNEMONIC("calln Ev");
15469 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
15470 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
15471
15472 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15473 {
15474 /* The new RIP is taken from a register. */
15475 switch (pIemCpu->enmEffOpSize)
15476 {
15477 case IEMMODE_16BIT:
15478 IEM_MC_BEGIN(1, 0);
15479 IEM_MC_ARG(uint16_t, u16Target, 0);
15480 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
15481 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
15482 IEM_MC_END()
15483 return VINF_SUCCESS;
15484
15485 case IEMMODE_32BIT:
15486 IEM_MC_BEGIN(1, 0);
15487 IEM_MC_ARG(uint32_t, u32Target, 0);
15488 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
15489 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
15490 IEM_MC_END()
15491 return VINF_SUCCESS;
15492
15493 case IEMMODE_64BIT:
15494 IEM_MC_BEGIN(1, 0);
15495 IEM_MC_ARG(uint64_t, u64Target, 0);
15496 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
15497 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
15498 IEM_MC_END()
15499 return VINF_SUCCESS;
15500
15501 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15502 }
15503 }
15504 else
15505 {
15506 /* The new RIP is taken from a register. */
15507 switch (pIemCpu->enmEffOpSize)
15508 {
15509 case IEMMODE_16BIT:
15510 IEM_MC_BEGIN(1, 1);
15511 IEM_MC_ARG(uint16_t, u16Target, 0);
15512 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15514 IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
15515 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
15516 IEM_MC_END()
15517 return VINF_SUCCESS;
15518
15519 case IEMMODE_32BIT:
15520 IEM_MC_BEGIN(1, 1);
15521 IEM_MC_ARG(uint32_t, u32Target, 0);
15522 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15523 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15524 IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
15525 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
15526 IEM_MC_END()
15527 return VINF_SUCCESS;
15528
15529 case IEMMODE_64BIT:
15530 IEM_MC_BEGIN(1, 1);
15531 IEM_MC_ARG(uint64_t, u64Target, 0);
15532 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15533 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15534 IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
15535 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
15536 IEM_MC_END()
15537 return VINF_SUCCESS;
15538
15539 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15540 }
15541 }
15542}
15543
15544typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
15545
15546FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
15547{
15548 /* Registers? How?? */
15549 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15550 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
15551
15552 /* Far pointer loaded from memory. */
15553 switch (pIemCpu->enmEffOpSize)
15554 {
15555 case IEMMODE_16BIT:
15556 IEM_MC_BEGIN(3, 1);
15557 IEM_MC_ARG(uint16_t, u16Sel, 0);
15558 IEM_MC_ARG(uint16_t, offSeg, 1);
15559 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
15560 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15561 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15563 IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
15564 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 2);
15565 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
15566 IEM_MC_END();
15567 return VINF_SUCCESS;
15568
15569 case IEMMODE_32BIT:
15570 IEM_MC_BEGIN(3, 1);
15571 IEM_MC_ARG(uint16_t, u16Sel, 0);
15572 IEM_MC_ARG(uint32_t, offSeg, 1);
15573 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
15574 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15575 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15577 IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
15578 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 4);
15579 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
15580 IEM_MC_END();
15581 return VINF_SUCCESS;
15582
15583 case IEMMODE_64BIT:
15584 IEM_MC_BEGIN(3, 1);
15585 IEM_MC_ARG(uint16_t, u16Sel, 0);
15586 IEM_MC_ARG(uint64_t, offSeg, 1);
15587 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
15588 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15589 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15591 IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
15592 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 8);
15593 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
15594 IEM_MC_END();
15595 return VINF_SUCCESS;
15596
15597 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15598 }
15599}
15600
15601
15602/**
15603 * Opcode 0xff /3.
15604 * @param bRm The RM byte.
15605 */
15606FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
15607{
15608 IEMOP_MNEMONIC("callf Ep");
15609 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
15610}
15611
15612
15613/**
15614 * Opcode 0xff /4.
15615 * @param bRm The RM byte.
15616 */
15617FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
15618{
15619 IEMOP_MNEMONIC("jmpn Ev");
15620 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
15621 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
15622
15623 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15624 {
15625 /* The new RIP is taken from a register. */
15626 switch (pIemCpu->enmEffOpSize)
15627 {
15628 case IEMMODE_16BIT:
15629 IEM_MC_BEGIN(0, 1);
15630 IEM_MC_LOCAL(uint16_t, u16Target);
15631 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
15632 IEM_MC_SET_RIP_U16(u16Target);
15633 IEM_MC_END()
15634 return VINF_SUCCESS;
15635
15636 case IEMMODE_32BIT:
15637 IEM_MC_BEGIN(0, 1);
15638 IEM_MC_LOCAL(uint32_t, u32Target);
15639 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
15640 IEM_MC_SET_RIP_U32(u32Target);
15641 IEM_MC_END()
15642 return VINF_SUCCESS;
15643
15644 case IEMMODE_64BIT:
15645 IEM_MC_BEGIN(0, 1);
15646 IEM_MC_LOCAL(uint64_t, u64Target);
15647 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
15648 IEM_MC_SET_RIP_U64(u64Target);
15649 IEM_MC_END()
15650 return VINF_SUCCESS;
15651
15652 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15653 }
15654 }
15655 else
15656 {
15657 /* The new RIP is taken from a register. */
15658 switch (pIemCpu->enmEffOpSize)
15659 {
15660 case IEMMODE_16BIT:
15661 IEM_MC_BEGIN(0, 2);
15662 IEM_MC_LOCAL(uint16_t, u16Target);
15663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15664 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15665 IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
15666 IEM_MC_SET_RIP_U16(u16Target);
15667 IEM_MC_END()
15668 return VINF_SUCCESS;
15669
15670 case IEMMODE_32BIT:
15671 IEM_MC_BEGIN(0, 2);
15672 IEM_MC_LOCAL(uint32_t, u32Target);
15673 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15674 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15675 IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
15676 IEM_MC_SET_RIP_U32(u32Target);
15677 IEM_MC_END()
15678 return VINF_SUCCESS;
15679
15680 case IEMMODE_64BIT:
15681 IEM_MC_BEGIN(0, 2);
15682 IEM_MC_LOCAL(uint32_t, u32Target);
15683 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15684 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15685 IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
15686 IEM_MC_SET_RIP_U32(u32Target);
15687 IEM_MC_END()
15688 return VINF_SUCCESS;
15689
15690 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15691 }
15692 }
15693}
15694
15695
15696/**
15697 * Opcode 0xff /5.
15698 * @param bRm The RM byte.
15699 */
15700FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
15701{
15702 IEMOP_MNEMONIC("jmp Ep");
15703 IEMOP_HLP_NO_64BIT();
15704 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
15705}
15706
15707
15708/**
15709 * Opcode 0xff /6.
15710 * @param bRm The RM byte.
15711 */
15712FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
15713{
15714 IEMOP_MNEMONIC("push Ev");
15715 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
15716
15717 /* Registers are handled by a common worker. */
15718 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15719 return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
15720
15721 /* Memory we do here. */
15722 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
15723 switch (pIemCpu->enmEffOpSize)
15724 {
15725 case IEMMODE_16BIT:
15726 IEM_MC_BEGIN(0, 2);
15727 IEM_MC_LOCAL(uint16_t, u16Src);
15728 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15729 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15730 IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffSrc);
15731 IEM_MC_PUSH_U16(u16Src);
15732 IEM_MC_ADVANCE_RIP();
15733 IEM_MC_END();
15734 return VINF_SUCCESS;
15735
15736 case IEMMODE_32BIT:
15737 IEM_MC_BEGIN(0, 2);
15738 IEM_MC_LOCAL(uint32_t, u32Src);
15739 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15740 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15741 IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffSrc);
15742 IEM_MC_PUSH_U32(u32Src);
15743 IEM_MC_ADVANCE_RIP();
15744 IEM_MC_END();
15745 return VINF_SUCCESS;
15746
15747 case IEMMODE_64BIT:
15748 IEM_MC_BEGIN(0, 2);
15749 IEM_MC_LOCAL(uint64_t, u64Src);
15750 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15751 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15752 IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffSrc);
15753 IEM_MC_PUSH_U64(u64Src);
15754 IEM_MC_ADVANCE_RIP();
15755 IEM_MC_END();
15756 return VINF_SUCCESS;
15757
15758 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15759 }
15760}
15761
15762
/** Opcode 0xff. */
FNIEMOP_DEF(iemOp_Grp5)
{
    /*
     * Group 5: dispatch on the ModR/M reg field.
     * /0 = INC Ev, /1 = DEC Ev, /2 = CALL Ev, /3 = CALLF Ep,
     * /4 = JMP Ev, /5 = JMPF Ep, /6 = PUSH Ev, /7 = invalid.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC("inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC("dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC("grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    AssertFailedReturn(VERR_INTERNAL_ERROR_2);
}
15791
15792
15793
/**
 * The one-byte opcode decoder function table, indexed by the opcode byte.
 * Prefix bytes (segment overrides, operand/address size, lock, rep) are
 * entries here as well; they recurse back into the decoder.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp, iemOp_lds_Gv_Mp, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_Invalid, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_Invalid, iemOp_repne, iemOp_repe, /** @todo 0xf1 is INT1 / ICEBP. */
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
15861
15862
15863/** @} */
15864
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette