VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 40255

Last change on this file since 40255 was 40255, checked in by vboxsync, 13 years ago

Implemented fpu instruction stubs starting with 0xdd (fiadd m32i ++).

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 487.8 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 40255 2012-02-25 00:38:51Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2012 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
25/**
26 * Common worker for instructions like ADD, AND, OR, ++ with a byte
27 * memory/register as the destination.
28 *
29 * @param pImpl Pointer to the instruction implementation (assembly).
30 */
/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination.
 *
 * Register form (mod=3): the reg field selects the source byte register and
 * the r/m field the destination register.  Memory form: the destination byte
 * is mapped in guest memory and, when a LOCK prefix is present, the locked
 * variant of the implementation is invoked.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK with a register destination is #UD. */

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* A NULL locked handler marks instructions that never write the
           destination (CMP, TEST per the note below), so map it read-only. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,  pu8Dst,           0);
        IEM_MC_ARG(uint8_t,    u8Src,            1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
85
86
87/**
88 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
89 * memory/register as the destination.
90 *
91 * @param pImpl Pointer to the instruction implementation (assembly).
92 */
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * The operand width (16/32/64 bit) is chosen from the current effective
 * operand size.  Register form: reg is the source, r/m is the destination.
 * Memory form: the destination is mapped and, with a LOCK prefix, the locked
 * variant of the implementation is used.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK with a register destination is #UD. */

        /* NOTE(review): no default case here; assumes enmEffOpSize is always
           one of the three handled IEMMODE values -- confirm, cf. the
           IEM_NOT_REACHED_DEFAULT_CASE_RET used in iemOpHlpBinaryOperator_rAX_Iz. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* pfnLockedU8 being NULL marks read-only instructions (CMP,TEST);
           presumably the U16/U32/U64 locked pointers are NULL in exactly the
           same cases -- confirm against the IEMOPBINSIZES tables. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t,   u16Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t,   u32Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t,   u64Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
231
232
233/**
234 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
235 * the destination.
236 *
237 * @param pImpl Pointer to the instruction implementation (assembly).
238 */
/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination.
 *
 * Register form: r/m is the source, reg is the destination.  Memory form:
 * the source byte is fetched from memory; since the destination is always a
 * register, no memory mapping or LOCK handling is needed here.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_MEM_U8(u8Src, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
284
285
286/**
287 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
288 * register as the destination.
289 *
290 * @param pImpl Pointer to the instruction implementation (assembly).
291 */
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination.
 *
 * Width is picked from the effective operand size.  The destination is always
 * a register, so the memory form only fetches the source operand and never
 * needs LOCK handling.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* NOTE(review): no default case; assumes enmEffOpSize is one of the
           three handled IEMMODE values -- confirm. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
411
412
413/**
414 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
415 * a byte immediate.
416 *
417 * @param pImpl Pointer to the instruction implementation (assembly).
418 */
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * Decodes the imm8, then applies the operation to AL in place.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /* register destination => LOCK is #UD. */

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1);
    IEM_MC_ARG(uint32_t *,      pEFlags,            2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
437
438
439/**
440 * Common worker for instructions like ADD, AND, OR, ++ with working on
441 * AX/EAX/RAX with a word/dword immediate.
442 *
443 * @param pImpl Pointer to the instruction implementation (assembly).
444 */
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * In 64-bit mode the immediate is a dword sign-extended to a qword, as per
 * the usual Iz encoding (see IEM_OPCODE_GET_NEXT_S32_SX_U64 below).
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Iz is at most 32 bits wide; sign-extend it to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
509
510
/** Opcodes 0xf1, 0xd6.
 * Shared handler for invalid one-byte opcodes: raises \#UD. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC("Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
517
518
519
520/** @name ..... opcodes.
521 *
522 * @{
523 */
524
525/** @} */
526
527
528/** @name Two byte opcodes (first byte 0x0f).
529 *
530 * @{
531 */
532
/** Opcode 0x0f 0x00 /0 (SLDT - store LDTR). Not implemented yet. */
FNIEMOP_STUB_1(iemOp_Grp6_sldt, uint8_t, bRm);


/** Opcode 0x0f 0x00 /1 (STR - store task register). Not implemented yet. */
FNIEMOP_STUB_1(iemOp_Grp6_str, uint8_t, bRm);
539
540
/** Opcode 0x0f 0x00 /2 (LLDT - load LDTR from a selector).
 * NOTE(review): only the memory form raises \#GP(0) for CPL != 0 here; the
 * register form presumably relies on iemCImpl_lldt to do the privilege
 * check -- confirm against the C implementation. */
FNIEMOP_DEF_1(iemOp_Grp6_lldt, uint8_t, bRm)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: selector comes from a general register. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        /* Memory form: fetch the selector word from guest memory. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
566
567
/** Opcode 0x0f 0x00 /3 (LTR - load task register from a selector).
 * Mirrors the LLDT worker above; see the privilege-check note there. */
FNIEMOP_DEF_1(iemOp_Grp6_ltr, uint8_t, bRm)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        /* Memory form. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
593
594
/** Opcode 0x0f 0x00 /4 (VERR - verify segment for reading). Not implemented yet. */
FNIEMOP_STUB_1(iemOp_Grp6_verr, uint8_t, bRm);


/** Opcode 0x0f 0x00 /5 (VERW - verify segment for writing). Not implemented yet. */
FNIEMOP_STUB_1(iemOp_Grp6_verw, uint8_t, bRm);
601
602
603/** Opcode 0x0f 0x00. */
604FNIEMOP_DEF(iemOp_Grp6)
605{
606 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
607 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
608 {
609 case 0: return FNIEMOP_CALL_1(iemOp_Grp6_sldt, bRm);
610 case 1: return FNIEMOP_CALL_1(iemOp_Grp6_str, bRm);
611 case 2: return FNIEMOP_CALL_1(iemOp_Grp6_lldt, bRm);
612 case 3: return FNIEMOP_CALL_1(iemOp_Grp6_ltr, bRm);
613 case 4: return FNIEMOP_CALL_1(iemOp_Grp6_verr, bRm);
614 case 5: return FNIEMOP_CALL_1(iemOp_Grp6_verw, bRm);
615 case 6: return IEMOP_RAISE_INVALID_OPCODE();
616 case 7: return IEMOP_RAISE_INVALID_OPCODE();
617 IEM_NOT_REACHED_DEFAULT_CASE_RET();
618 }
619
620}
621
622
/** Opcode 0x0f 0x01 /0 (SGDT - store GDTR). Not implemented: asserts. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    NOREF(pIemCpu); NOREF(bRm);
    AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
629
630
/** Opcode 0x0f 0x01 /0, mod=3, rm=1 (VMCALL) - see the Grp7 dispatcher below.
 * Not implemented; raises \#UD (after asserting in debug builds). */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    AssertFailed();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0, mod=3, rm=2 (VMLAUNCH). Not implemented; raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    AssertFailed();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0, mod=3, rm=3 (VMRESUME). Not implemented; raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    AssertFailed();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0, mod=3, rm=4 (VMXOFF). Not implemented; raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    AssertFailed();
    return IEMOP_RAISE_INVALID_OPCODE();
}
661
662
/** Opcode 0x0f 0x01 /1 (SIDT - store IDTR). Not implemented: asserts. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    NOREF(pIemCpu); NOREF(bRm);
    AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
669
670
/** Opcode 0x0f 0x01 /1, mod=3, rm=0 (MONITOR). Not implemented: asserts. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    NOREF(pIemCpu);
    AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}


/** Opcode 0x0f 0x01 /1, mod=3, rm=1 (MWAIT). Not implemented: asserts. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    NOREF(pIemCpu);
    AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
685
686
/** Opcode 0x0f 0x01 /2 (LGDT - load GDTR from memory).
 * In 64-bit mode the operand size is forced to 64-bit regardless of
 * prefixes; otherwise the current effective operand size is used. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEMMODE enmEffOpSize = pIemCpu->enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pIemCpu->enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/pIemCpu->iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
704
705
/** Opcode 0x0f 0x01 /2, mod=3, rm=0 (XGETBV). Not implemented; raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    AssertFailed();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /2, mod=3, rm=1 (XSETBV). Not implemented; raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    AssertFailed();
    return IEMOP_RAISE_INVALID_OPCODE();
}
720
721
/** Opcode 0x0f 0x01 /3 (LIDT - load IDTR from memory).
 * Same operand-size handling as the LGDT worker above. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEMMODE enmEffOpSize = pIemCpu->enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pIemCpu->enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/pIemCpu->iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
739
740
/** Opcode 0x0f 0x01 /4 (SMSW - store machine status word, i.e. CR0 bits).
 * Register form honors the effective operand size; the memory form always
 * stores 16 bits. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}
793
794
/** Opcode 0x0f 0x01 /6 (LMSW - load machine status word into CR0). */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        /* Memory form: fetch the 16-bit source value first. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
821
822
/** Opcode 0x0f 0x01 /7 (INVLPG - invalidate TLB entry for an address).
 * Only the effective address is decoded here; the actual invalidation is
 * done by iemCImpl_invlpg. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
834
835
/** Opcode 0x0f 0x01 /7, mod=3, rm=0 (SWAPGS). Not implemented: asserts. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    NOREF(pIemCpu);
    AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}


/** Opcode 0x0f 0x01 /7, mod=3, rm=1 (RDTSCP). Not implemented: asserts. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pIemCpu);
    AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
850
851
/** Opcode 0x0f 0x01 (group 7).
 * Dispatches on the ModR/M reg field.  For several /n encodings the mod=3
 * (register) form selects special instructions via the rm field (VMX ops,
 * MONITOR/MWAIT, XGETBV/XSETBV, SWAPGS/RDTSCP); the memory form selects the
 * descriptor-table instruction. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: /* SGDT, or (mod=3) the VMX instruction group. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1: /* SIDT, or (mod=3) MONITOR/MWAIT. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2: /* LGDT, or (mod=3) XGETBV/XSETBV. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3: /* LIDT; no register form. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            return IEMOP_RAISE_INVALID_OPCODE();

        case 4: /* SMSW handles both forms itself. */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5: /* Undefined encoding. */
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6: /* LMSW handles both forms itself. */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* INVLPG, or (mod=3) SWAPGS/RDTSCP. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
917
918
/** Opcode 0x0f 0x02 (LAR). Not implemented yet. */
FNIEMOP_STUB(iemOp_lar_Gv_Ew);
/** Opcode 0x0f 0x03 (LSL). Not implemented yet. */
FNIEMOP_STUB(iemOp_lsl_Gv_Ew);
/** Opcode 0x0f 0x05 (SYSCALL; comment corrected - 0x0f 0x04 is undefined). */
FNIEMOP_STUB(iemOp_syscall);
925
926
/** Opcode 0x0f 0x06 (CLTS - clear the task-switched flag in CR0; comment
 * corrected from 0x05, which is SYSCALL). */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC("clts");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* All the work is deferred to the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
934
935
/** Opcode 0x0f 0x07 (SYSRET; comment corrected from 0x06, which is CLTS).
 * Not implemented yet. */
FNIEMOP_STUB(iemOp_sysret);
/** Opcode 0x0f 0x08 (INVD). Not implemented yet. */
FNIEMOP_STUB(iemOp_invd);
/** Opcode 0x0f 0x09 (WBINVD). Not implemented yet. */
FNIEMOP_STUB(iemOp_wbinvd);
/** Opcode 0x0f 0x0b (UD2). Not implemented yet. */
FNIEMOP_STUB(iemOp_ud2);
944
945/** Opcode 0x0f 0x0d. */
946FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
947{
948 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
949 if (!IEM_IS_AMD_CPUID_FEATURES_ANY_PRESENT(X86_CPUID_AMD_FEATURE_EDX_LONG_MODE | X86_CPUID_AMD_FEATURE_EDX_3DNOW,
950 X86_CPUID_AMD_FEATURE_ECX_3DNOWPRF))
951 {
952 IEMOP_MNEMONIC("GrpP");
953 return IEMOP_RAISE_INVALID_OPCODE();
954 }
955
956 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
957 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
958 {
959 IEMOP_MNEMONIC("GrpP");
960 return IEMOP_RAISE_INVALID_OPCODE();
961 }
962
963 IEMOP_HLP_NO_LOCK_PREFIX();
964 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
965 {
966 case 2: /* Aliased to /0 for the time being. */
967 case 4: /* Aliased to /0 for the time being. */
968 case 5: /* Aliased to /0 for the time being. */
969 case 6: /* Aliased to /0 for the time being. */
970 case 7: /* Aliased to /0 for the time being. */
971 case 0: IEMOP_MNEMONIC("prefetch"); break;
972 case 1: IEMOP_MNEMONIC("prefetchw "); break;
973 case 3: IEMOP_MNEMONIC("prefetchw"); break;
974 IEM_NOT_REACHED_DEFAULT_CASE_RET();
975 }
976
977 IEM_MC_BEGIN(0, 1);
978 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
979 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
980 /* Currently a NOP. */
981 IEM_MC_ADVANCE_RIP();
982 IEM_MC_END();
983 return VINF_SUCCESS;
984}
985
986
/** Opcode 0x0f 0x0e (FEMMS, 3DNow!). Not implemented yet. */
FNIEMOP_STUB(iemOp_femms);
/** Opcode 0x0f 0x0f (3DNow! escape). Not implemented yet. */
FNIEMOP_STUB(iemOp_3Dnow);
/** Opcode 0x0f 0x10. Not implemented yet. */
FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x11. Not implemented yet. */
FNIEMOP_STUB(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x12. Not implemented yet. */
FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq);
/** Opcode 0x0f 0x13. Not implemented yet. */
FNIEMOP_STUB(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq);
/** Opcode 0x0f 0x14. Not implemented yet.
 * NOTE(review): 0x0f 0x14 is UNPCKLPS/UNPCKLPD; the identifier spells
 * "unpckhlps" -- name kept as-is since it may be referenced from the opcode
 * map elsewhere in this file. */
FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
/** Opcode 0x0f 0x15. Not implemented yet. */
FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
/** Opcode 0x0f 0x16. Not implemented yet. */
FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq);
/** Opcode 0x0f 0x17. Not implemented yet. */
FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq);
1007
1008
/** Opcode 0x0f 0x18 (group 16: PREFETCHNTA/T0/T1/T2 hints).
 * Memory forms decode the effective address but perform no access; the
 * register (mod=3) form is invalid. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC("prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC("prefetchT0  m8"); break;
            case 2: IEMOP_MNEMONIC("prefetchT1  m8"); break;
            case 3: IEMOP_MNEMONIC("prefetchT2  m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        /* Decode the effective address, but don't touch memory. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}
1040
1041
/** Opcode 0x0f 0x19..0x1f (multi-byte NOP Ev).
 * Decodes the ModR/M operand (including the effective address for memory
 * forms) but performs no access. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1064
1065
1066/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Rd,Cd");
    /* Operand size is forced: 64-bit in long mode, 32-bit otherwise. */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_IS_AMD_CPUID_FEATURE_PRESENT_ECX(X86_CPUID_AMD_FEATURE_ECX_CR8L))
            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2..CR4 and CR8 are valid sources; anything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB, iCrReg);
}
1096
1097
1098/** Opcode 0x0f 0x21. */
1099FNIEMOP_DEF(iemOp_mov_Rd_Dd)
1100{
1101 IEMOP_MNEMONIC("mov Rd,Dd");
1102 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1103 IEMOP_HLP_NO_LOCK_PREFIX();
1104 if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
1105 return IEMOP_RAISE_INVALID_OPCODE();
1106 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
1107 (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB,
1108 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
1109}
1110
1111
1112/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Cd,Rd");
    /* Operand size is forced: 64-bit in long mode, 32-bit otherwise. */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_IS_AMD_CPUID_FEATURE_PRESENT_ECX(X86_CPUID_AMD_FEATURE_ECX_CR8L))
            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2..CR4 and CR8 are valid destinations; anything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1142
1143
1144/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    /* Writes a general register into a debug register. */
    IEMOP_MNEMONIC("mov Dd,Rd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REX.R cannot be used to address DR8..DR15 - #UD. */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1156
1157
1158/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    /* Test registers no longer exist; the opcode always raises #UD. */
    IEMOP_MNEMONIC("mov Rd,Td");
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1165
1166
1167/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    /* Test registers no longer exist; the opcode always raises #UD. */
    IEMOP_MNEMONIC("mov Td,Rd");
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1174
1175
/* Decoder stubs for 0x0f 0x28..0x30 (SSE moves/conversions/compares and WRMSR). */
/** Opcode 0x0f 0x28. */
FNIEMOP_STUB(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd);
/** Opcode 0x0f 0x29. */
FNIEMOP_STUB(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd);
/** Opcode 0x0f 0x2a. */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey);
/** Opcode 0x0f 0x2b. */
FNIEMOP_STUB(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd);
/** Opcode 0x0f 0x2c. */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd);
/** Opcode 0x0f 0x2d. */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
/** Opcode 0x0f 0x2e. */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd);
/** Opcode 0x0f 0x2f. */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
/** Opcode 0x0f 0x30. */
FNIEMOP_STUB(iemOp_wrmsr);
1194
1195
1196/** Opcode 0x0f 0x31. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    /* Defers to the C implementation; no operands to decode. */
    IEMOP_MNEMONIC("rdtsc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
1203
1204
/** Opcode 0x0f 0x32. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    /* Defers to the C implementation; no operands to decode. */
    IEMOP_MNEMONIC("rdmsr");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
1212
1213
/** Opcode 0x0f 0x33. */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38. */
FNIEMOP_STUB(iemOp_3byte_Esc_A4);
/** Opcode 0x0f 0x39. */
/* NOTE(review): table A-5 is the 0x0f 0x3a escape architecturally; verify
   which table index dispatches to this stub before relying on the comment. */
FNIEMOP_STUB(iemOp_3byte_Esc_A5);
/** Opcode 0x0f 0x3c (?). */
FNIEMOP_STUB(iemOp_movnti_Gv_Ev);
1228
/**
 * Implements a conditional move.
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * Register form: fetches Ev from the r/m register; memory form: fetches Ev
 * from the decoded effective address.  In both forms the destination is only
 * written when a_Cnd holds, except for the 32-bit operand size where the
 * high half of the 64-bit destination is cleared even when the condition is
 * false (see the IEM_MC_CLEAR_HIGH_GREG_U64 in the ELSE branches).
 *
 * @param a_Cnd The conditional "microcode" operation.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
1329
1330
1331
/** Opcode 0x0f 0x40. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF)); /* move when OF=1 */
}
1338
1339
/** Opcode 0x0f 0x41. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF)); /* move when OF=0 */
}
1346
1347
/** Opcode 0x0f 0x42. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)); /* cmovc/cmovb/cmovnae: move when CF=1 */
}
1354
1355
/** Opcode 0x0f 0x43. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)); /* cmovnc/cmovnb/cmovae: move when CF=0 */
}
1362
1363
/** Opcode 0x0f 0x44. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC("cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)); /* cmove/cmovz: move when ZF=1 */
}
1370
1371
/** Opcode 0x0f 0x45. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)); /* cmovne/cmovnz: move when ZF=0 */
}
1378
1379
/** Opcode 0x0f 0x46. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)); /* cmovbe/cmovna: move when CF=1 or ZF=1 */
}
1386
1387
/** Opcode 0x0f 0x47. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)); /* cmovnbe/cmova: move when CF=0 and ZF=0 */
}
1394
1395
/** Opcode 0x0f 0x48. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF)); /* move when SF=1 */
}
1402
1403
/** Opcode 0x0f 0x49. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF)); /* move when SF=0 */
}
1410
1411
/** Opcode 0x0f 0x4a. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)); /* cmovp/cmovpe: move when PF=1 */
}
1418
1419
/** Opcode 0x0f 0x4b. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)); /* cmovnp/cmovpo: move when PF=0 */
}
1426
1427
/** Opcode 0x0f 0x4c. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF)); /* cmovl/cmovnge: move when SF != OF */
}
1434
1435
/** Opcode 0x0f 0x4d. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF)); /* cmovnl/cmovge: move when SF == OF */
}
1442
1443
/** Opcode 0x0f 0x4e. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF)); /* cmovle/cmovng: move when ZF=1 or SF != OF */
}
1450
1451
/** Opcode 0x0f 0x4f. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF)); /* cmovnle/cmovg: move when ZF=0 and SF == OF */
}

#undef CMOV_X
1460
/* Decoder stubs for 0x0f 0x50..0x7f: SSE/SSE2/SSE3 arithmetic, conversion and
   MMX/SSE pack/unpack/compare/shuffle opcodes, plus EMMS and VMREAD/VMWRITE.
   Each stub covers all prefix forms of its opcode byte. */
/** Opcode 0x0f 0x50. */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
/** Opcode 0x0f 0x51. */
FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
/** Opcode 0x0f 0x52. */
FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
/** Opcode 0x0f 0x53. */
FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
/** Opcode 0x0f 0x54. */
FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
/** Opcode 0x0f 0x55. */
FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
/** Opcode 0x0f 0x56. */
FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
/** Opcode 0x0f 0x57. */
FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
/** Opcode 0x0f 0x58. */
FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd);
/** Opcode 0x0f 0x59. */
FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);
/** Opcode 0x0f 0x5a. */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
/** Opcode 0x0f 0x5b. */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
/** Opcode 0x0f 0x5c. */
FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
/** Opcode 0x0f 0x5d. */
FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
/** Opcode 0x0f 0x5e. */
FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
/** Opcode 0x0f 0x5f. */
FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
/** Opcode 0x0f 0x60. */
FNIEMOP_STUB(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq);
/** Opcode 0x0f 0x61. */
FNIEMOP_STUB(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq);
/** Opcode 0x0f 0x62. */
FNIEMOP_STUB(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq);
/** Opcode 0x0f 0x63. */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
/** Opcode 0x0f 0x64. */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
/** Opcode 0x0f 0x65. */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
/** Opcode 0x0f 0x66. */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
/** Opcode 0x0f 0x67. */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
/** Opcode 0x0f 0x68. */
FNIEMOP_STUB(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq);
/** Opcode 0x0f 0x69. */
FNIEMOP_STUB(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq);
/** Opcode 0x0f 0x6a. */
FNIEMOP_STUB(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq);
/** Opcode 0x0f 0x6b. */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);
/** Opcode 0x0f 0x6c. */
FNIEMOP_STUB(iemOp_punpcklqdq_Vdq_Wdq);
/** Opcode 0x0f 0x6d. */
FNIEMOP_STUB(iemOp_punpckhqdq_Vdq_Wdq);
/** Opcode 0x0f 0x6e. */
FNIEMOP_STUB(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey);
/** Opcode 0x0f 0x6f. */
FNIEMOP_STUB(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq);
/** Opcode 0x0f 0x70. */
FNIEMOP_STUB(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib);
/** Opcode 0x0f 0x71. */
FNIEMOP_STUB(iemOp_Grp12);
/** Opcode 0x0f 0x72. */
FNIEMOP_STUB(iemOp_Grp13);
/** Opcode 0x0f 0x73. */
FNIEMOP_STUB(iemOp_Grp14);
/** Opcode 0x0f 0x74. */
FNIEMOP_STUB(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq);
/** Opcode 0x0f 0x75. */
FNIEMOP_STUB(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq);
/** Opcode 0x0f 0x76. */
FNIEMOP_STUB(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq);
/** Opcode 0x0f 0x77. */
FNIEMOP_STUB(iemOp_emms);
/** Opcode 0x0f 0x78. */
FNIEMOP_STUB(iemOp_vmread);
/** Opcode 0x0f 0x79. */
FNIEMOP_STUB(iemOp_vmwrite);
/** Opcode 0x0f 0x7c. */
FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);
/** Opcode 0x0f 0x7d. */
FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
/** Opcode 0x0f 0x7e. */
FNIEMOP_STUB(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq);
/** Opcode 0x0f 0x7f. */
FNIEMOP_STUB(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq);
1553
1554
/** Opcode 0x0f 0x80. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC("jo Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when OF=1; 16-bit operand size uses a signed 16-bit
       displacement, otherwise a signed 32-bit one. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1588
1589
/** Opcode 0x0f 0x81. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC("jno Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when OF=0 (condition tested inverted: OF=1 falls through). */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1623
1624
/** Opcode 0x0f 0x82. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC("jc/jb/jnae Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when CF=1. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1658
1659
/** Opcode 0x0f 0x83. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC("jnc/jnb/jae Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when CF=0 (condition tested inverted). */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1693
1694
/** Opcode 0x0f 0x84. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC("je/jz Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when ZF=1. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1728
1729
/** Opcode 0x0f 0x85. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC("jne/jnz Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when ZF=0 (condition tested inverted). */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1763
1764
/** Opcode 0x0f 0x86. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC("jbe/jna Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when CF=1 or ZF=1. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1798
1799
/** Opcode 0x0f 0x87. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC("jnbe/ja Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when CF=0 and ZF=0 (condition tested inverted). */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1833
1834
/** Opcode 0x0f 0x88. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC("js Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when SF=1. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1868
1869
/** Opcode 0x0f 0x89. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC("jns Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when SF=0 (condition tested inverted). */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1903
1904
/** Opcode 0x0f 0x8a. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC("jp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when PF=1. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1938
1939
1940/** Opcode 0x0f 0x8b. */
1941FNIEMOP_DEF(iemOp_jnp_Jv)
1942{
1943 IEMOP_MNEMONIC("jo Jv");
1944 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
1945 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
1946 {
1947 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
1948 IEMOP_HLP_NO_LOCK_PREFIX();
1949
1950 IEM_MC_BEGIN(0, 0);
1951 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
1952 IEM_MC_ADVANCE_RIP();
1953 } IEM_MC_ELSE() {
1954 IEM_MC_REL_JMP_S16(i16Imm);
1955 } IEM_MC_ENDIF();
1956 IEM_MC_END();
1957 }
1958 else
1959 {
1960 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
1961 IEMOP_HLP_NO_LOCK_PREFIX();
1962
1963 IEM_MC_BEGIN(0, 0);
1964 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
1965 IEM_MC_ADVANCE_RIP();
1966 } IEM_MC_ELSE() {
1967 IEM_MC_REL_JMP_S32(i32Imm);
1968 } IEM_MC_ENDIF();
1969 IEM_MC_END();
1970 }
1971 return VINF_SUCCESS;
1972}
1973
1974
/** Opcode 0x0f 0x8c. */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC("jl/jnge Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when SF != OF. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2008
2009
/** Opcode 0x0f 0x8d. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC("jnl/jge Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when SF == OF (condition tested inverted). */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2043
2044
/** Opcode 0x0f 0x8e. */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC("jle/jng Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when ZF=1 or SF != OF. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2078
2079
/** Opcode 0x0f 0x8f. */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC("jnle/jg Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when ZF=0 and SF == OF (condition tested inverted). */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2113
2114
/** Opcode 0x0f 0x90. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC("seto Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    /* Stores 1 in the destination byte when OF=1, 0 otherwise. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2153
2154
/** Opcode 0x0f 0x91. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC("setno Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    /* Stores 1 in the destination byte when OF=0, 0 otherwise (inverted stores). */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2193
2194
/**
 * Opcode 0x0f 0x92 - setc Eb.
 *
 * Stores 1 in the byte register/memory destination if the carry flag (CF) is
 * set, otherwise 0.
 */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC("setc Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2233
2234
/**
 * Opcode 0x0f 0x93 - setnc Eb.
 *
 * Stores 1 in the byte register/memory destination if the carry flag (CF) is
 * clear, otherwise 0.
 */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC("setnc Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2273
2274
/**
 * Opcode 0x0f 0x94 - sete Eb.
 *
 * Stores 1 in the byte register/memory destination if the zero flag (ZF) is
 * set, otherwise 0.
 */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC("sete Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2313
2314
/**
 * Opcode 0x0f 0x95 - setne Eb.
 *
 * Stores 1 in the byte register/memory destination if the zero flag (ZF) is
 * clear, otherwise 0.
 */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC("setne Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2353
2354
/**
 * Opcode 0x0f 0x96 - setbe Eb.
 *
 * Stores 1 in the byte register/memory destination if either the carry flag
 * (CF) or the zero flag (ZF) is set, otherwise 0.
 */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC("setbe Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2393
2394
/**
 * Opcode 0x0f 0x97 - setnbe Eb.
 *
 * Stores 1 in the byte register/memory destination if both the carry flag
 * (CF) and the zero flag (ZF) are clear, otherwise 0.
 */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC("setnbe Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2433
2434
/**
 * Opcode 0x0f 0x98 - sets Eb.
 *
 * Stores 1 in the byte register/memory destination if the sign flag (SF) is
 * set, otherwise 0.
 */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC("sets Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2473
2474
/**
 * Opcode 0x0f 0x99 - setns Eb.
 *
 * Stores 1 in the byte register/memory destination if the sign flag (SF) is
 * clear, otherwise 0.
 */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC("setns Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2513
2514
2515/** Opcode 0x0f 0x9a. */
2516FNIEMOP_DEF(iemOp_setp_Eb)
2517{
2518 IEMOP_MNEMONIC("setnp Eb");
2519 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2520 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
2521
2522 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
2523 * any way. AMD says it's "unused", whatever that means. We're
2524 * ignoring for now. */
2525 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2526 {
2527 /* register target */
2528 IEM_MC_BEGIN(0, 0);
2529 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
2530 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
2531 } IEM_MC_ELSE() {
2532 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
2533 } IEM_MC_ENDIF();
2534 IEM_MC_ADVANCE_RIP();
2535 IEM_MC_END();
2536 }
2537 else
2538 {
2539 /* memory target */
2540 IEM_MC_BEGIN(0, 1);
2541 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2542 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
2543 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
2544 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
2545 } IEM_MC_ELSE() {
2546 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
2547 } IEM_MC_ENDIF();
2548 IEM_MC_ADVANCE_RIP();
2549 IEM_MC_END();
2550 }
2551 return VINF_SUCCESS;
2552}
2553
2554
/**
 * Opcode 0x0f 0x9b - setnp Eb.
 *
 * Stores 1 in the byte register/memory destination if the parity flag (PF)
 * is clear, otherwise 0.
 */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC("setnp Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2593
2594
/**
 * Opcode 0x0f 0x9c - setl Eb.
 *
 * Stores 1 in the byte register/memory destination if SF != OF (signed
 * less-than), otherwise 0.
 */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC("setl Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2633
2634
/**
 * Opcode 0x0f 0x9d - setnl Eb.
 *
 * Stores 1 in the byte register/memory destination if SF == OF (signed
 * greater-or-equal), otherwise 0.
 */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC("setnl Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2673
2674
/**
 * Opcode 0x0f 0x9e - setle Eb.
 *
 * Stores 1 in the byte register/memory destination if ZF is set or SF != OF
 * (signed less-or-equal), otherwise 0.
 */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC("setle Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2713
2714
/**
 * Opcode 0x0f 0x9f - setnle Eb.
 *
 * Stores 1 in the byte register/memory destination if ZF is clear and
 * SF == OF (signed greater-than), otherwise 0.
 */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC("setnle Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2753
2754
/**
 * Common 'push segment-register' helper.
 *
 * Pushes the (zero-extended) segment selector with the current effective
 * operand size.  Pushing ES/CS/SS/DS (iReg < X86_SREG_FS) is invalid in
 * 64-bit mode; FS/GS pushes default to 64-bit operand size there.
 *
 * @param   iReg    The segment register index (X86_SREG_XXX).
 */
FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (iReg < X86_SREG_FS)
        IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
2797
2798
/** Opcode 0x0f 0xa0 - push fs.  Defers to the common segment-push helper. */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC("push fs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
2806
2807
/** Opcode 0x0f 0xa1 - pop fs.  Defers to the C implementation of pop Sreg. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC("pop fs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pIemCpu->enmEffOpSize);
}
2815
2816
/** Opcode 0x0f 0xa2 - cpuid.  Defers to the C implementation. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC("cpuid");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
2824
2825
2826/**
2827 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
2828 * iemOp_bts_Ev_Gv.
2829 */
2830FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
2831{
2832 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2833 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2834
2835 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2836 {
2837 /* register destination. */
2838 IEMOP_HLP_NO_LOCK_PREFIX();
2839 switch (pIemCpu->enmEffOpSize)
2840 {
2841 case IEMMODE_16BIT:
2842 IEM_MC_BEGIN(3, 0);
2843 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2844 IEM_MC_ARG(uint16_t, u16Src, 1);
2845 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2846
2847 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2848 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
2849 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2850 IEM_MC_REF_EFLAGS(pEFlags);
2851 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
2852
2853 IEM_MC_ADVANCE_RIP();
2854 IEM_MC_END();
2855 return VINF_SUCCESS;
2856
2857 case IEMMODE_32BIT:
2858 IEM_MC_BEGIN(3, 0);
2859 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2860 IEM_MC_ARG(uint32_t, u32Src, 1);
2861 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2862
2863 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2864 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
2865 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2866 IEM_MC_REF_EFLAGS(pEFlags);
2867 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
2868
2869 IEM_MC_ADVANCE_RIP();
2870 IEM_MC_END();
2871 return VINF_SUCCESS;
2872
2873 case IEMMODE_64BIT:
2874 IEM_MC_BEGIN(3, 0);
2875 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2876 IEM_MC_ARG(uint64_t, u64Src, 1);
2877 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2878
2879 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2880 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
2881 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2882 IEM_MC_REF_EFLAGS(pEFlags);
2883 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
2884
2885 IEM_MC_ADVANCE_RIP();
2886 IEM_MC_END();
2887 return VINF_SUCCESS;
2888
2889 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2890 }
2891 }
2892 else
2893 {
2894 /* memory destination. */
2895
2896 uint32_t fAccess;
2897 if (pImpl->pfnLockedU16)
2898 fAccess = IEM_ACCESS_DATA_RW;
2899 else /* BT */
2900 {
2901 IEMOP_HLP_NO_LOCK_PREFIX();
2902 fAccess = IEM_ACCESS_DATA_R;
2903 }
2904
2905 /** @todo test negative bit offsets! */
2906 switch (pIemCpu->enmEffOpSize)
2907 {
2908 case IEMMODE_16BIT:
2909 IEM_MC_BEGIN(3, 2);
2910 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2911 IEM_MC_ARG(uint16_t, u16Src, 1);
2912 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
2913 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2914 IEM_MC_LOCAL(int16_t, i16AddrAdj);
2915
2916 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
2917 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2918 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
2919 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
2920 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
2921 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 1);
2922 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
2923 IEM_MC_FETCH_EFLAGS(EFlags);
2924
2925 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
2926 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
2927 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
2928 else
2929 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
2930 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
2931
2932 IEM_MC_COMMIT_EFLAGS(EFlags);
2933 IEM_MC_ADVANCE_RIP();
2934 IEM_MC_END();
2935 return VINF_SUCCESS;
2936
2937 case IEMMODE_32BIT:
2938 IEM_MC_BEGIN(3, 2);
2939 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2940 IEM_MC_ARG(uint32_t, u32Src, 1);
2941 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
2942 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2943 IEM_MC_LOCAL(int32_t, i32AddrAdj);
2944
2945 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
2946 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2947 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
2948 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
2949 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
2950 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
2951 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
2952 IEM_MC_FETCH_EFLAGS(EFlags);
2953
2954 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
2955 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
2956 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
2957 else
2958 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
2959 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
2960
2961 IEM_MC_COMMIT_EFLAGS(EFlags);
2962 IEM_MC_ADVANCE_RIP();
2963 IEM_MC_END();
2964 return VINF_SUCCESS;
2965
2966 case IEMMODE_64BIT:
2967 IEM_MC_BEGIN(3, 2);
2968 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2969 IEM_MC_ARG(uint64_t, u64Src, 1);
2970 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
2971 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2972 IEM_MC_LOCAL(int64_t, i64AddrAdj);
2973
2974 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
2975 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2976 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
2977 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
2978 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
2979 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
2980 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
2981 IEM_MC_FETCH_EFLAGS(EFlags);
2982
2983 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
2984 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
2985 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
2986 else
2987 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
2988 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
2989
2990 IEM_MC_COMMIT_EFLAGS(EFlags);
2991 IEM_MC_ADVANCE_RIP();
2992 IEM_MC_END();
2993 return VINF_SUCCESS;
2994
2995 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2996 }
2997 }
2998}
2999
3000
3001/** Opcode 0x0f 0xa3. */
3002FNIEMOP_DEF(iemOp_bt_Ev_Gv)
3003{
3004 IEMOP_MNEMONIC("bt Gv,Gv");
3005 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
3006}
3007
3008
/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 *
 * Double-precision shift with an immediate shift count.  AF and OF are
 * undefined after these instructions.
 *
 * @param   pImpl   The shld/shrd implementation table.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: the immediate follows ModR/M directly. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t,        u16Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t,        u32Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t,        u64Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        /* Memory destination: the immediate is fetched after the effective
           address calculation has consumed any displacement bytes. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,              pu16Dst,                0);
                IEM_MC_ARG(uint16_t,                u16Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,              pu32Dst,                0);
                IEM_MC_ARG(uint32_t,                u32Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,              pu64Dst,                0);
                IEM_MC_ARG(uint64_t,                u64Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3152
3153
/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 *
 * Decodes the mod r/m byte and dispatches on the effective operand size,
 * double-precision shifting either a general register or a memory operand
 * by CL bits, with bits shifted in from the Gv register.  @a pImpl
 * supplies the size-specific assembly workers (shld or shrd).
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* AF and OF are documented as undefined for shld/shrd. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /* NOTE(review): redundant, already checked at the top. */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: map the operand read/write, shift in place, commit. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3296
3297
3298
/** Opcode 0x0f 0xa4. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    /* SHLD Ev,Gv,Ib - double precision shift left, immediate count. */
    IEMOP_MNEMONIC("shld Ev,Gv,Ib");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}
3305
3306
/** Opcode 0x0f 0xa7. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    /* SHLD Ev,Gv,CL - double precision shift left, count in CL. */
    IEMOP_MNEMONIC("shld Ev,Gv,CL");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}
3313
3314
/** Opcode 0x0f 0xa8. */
FNIEMOP_DEF(iemOp_push_gs)
{
    /* PUSH GS - push the GS segment selector. */
    IEMOP_MNEMONIC("push gs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
3322
3323
/** Opcode 0x0f 0xa9. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    /* POP GS - deferred to the C implementation (segment loading side effects). */
    IEMOP_MNEMONIC("pop gs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pIemCpu->enmEffOpSize);
}
3331
3332
/** Opcode 0x0f 0xaa.  RSM - resume from system management mode; not implemented. */
FNIEMOP_STUB(iemOp_rsm);
3335
3336
/** Opcode 0x0f 0xab. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    /* BTS Ev,Gv - bit test and set. */
    IEMOP_MNEMONIC("bts Ev,Gv");
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
3343
3344
/** Opcode 0x0f 0xac. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    /* SHRD Ev,Gv,Ib - double precision shift right, immediate count. */
    IEMOP_MNEMONIC("shrd Ev,Gv,Ib");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
3351
3352
/** Opcode 0x0f 0xad. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    /* SHRD Ev,Gv,CL - double precision shift right, count in CL. */
    IEMOP_MNEMONIC("shrd Ev,Gv,CL");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
3359
3360
3361/** Opcode 0x0f 0xae mem/0. */
3362FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
3363{
3364 IEMOP_MNEMONIC("fxsave m512");
3365 IEMOP_HLP_NO_LOCK_PREFIX();
3366 if (!IEM_IS_INTEL_CPUID_FEATURE_PRESENT_EDX(X86_CPUID_FEATURE_EDX_FXSR))
3367 return IEMOP_RAISE_INVALID_LOCK_PREFIX();
3368
3369 IEM_MC_BEGIN(3, 1);
3370 IEM_MC_ARG_CONST(uint8_t, iEffSeg,/*=*/pIemCpu->iEffSeg, 0);
3371 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
3372 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
3373 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm);
3374 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
3375 IEM_MC_END();
3376 return VINF_SUCCESS;
3377}
3378
3379
3380/** Opcode 0x0f 0xae mem/1. */
3381FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
3382{
3383 IEMOP_MNEMONIC("fxrstor m512");
3384 IEMOP_HLP_NO_LOCK_PREFIX();
3385 if (!IEM_IS_INTEL_CPUID_FEATURE_PRESENT_EDX(X86_CPUID_FEATURE_EDX_FXSR))
3386 return IEMOP_RAISE_INVALID_LOCK_PREFIX();
3387
3388 IEM_MC_BEGIN(3, 1);
3389 IEM_MC_ARG_CONST(uint8_t, iEffSeg,/*=*/pIemCpu->iEffSeg, 0);
3390 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
3391 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
3392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm);
3393 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
3394 IEM_MC_END();
3395 return VINF_SUCCESS;
3396}
3397
3398
/*
 * Remaining group 15 (0x0f 0xae) encodings - not implemented yet.
 * Mem forms are selected by the reg field; the 11b forms by reg field plus,
 * for rd/wr fs/gs base, an F3 prefix.
 */

/** Opcode 0x0f 0xae mem/2. */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3. */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4. */
FNIEMOP_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5. */
FNIEMOP_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6. */
FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7. */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);

/** Opcode 0x0f 0xae 11b/5. */
FNIEMOP_STUB_1(iemOp_Grp15_lfence, uint8_t, bRm);

/** Opcode 0x0f 0xae 11b/6. */
FNIEMOP_STUB_1(iemOp_Grp15_mfence, uint8_t, bRm);

/** Opcode 0x0f 0xae 11b/7. */
FNIEMOP_STUB_1(iemOp_Grp15_sfence, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/0. */
FNIEMOP_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1. */
FNIEMOP_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2. */
FNIEMOP_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3. */
FNIEMOP_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
3437
3438
/** Opcode 0x0f 0xae.
 *
 * Group 15 decoder: memory forms dispatch on the mod r/m reg field only;
 * register (11b) forms additionally dispatch on the repz/repnz/opsize/lock
 * prefix combination (no prefix for the fences, F3 for rd/wr fs/gs base).
 */
FNIEMOP_DEF(iemOp_Grp15)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Memory operand forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Register (11b) forms, further selected by prefix bytes. */
        switch (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0: /* no relevant prefixes: the fences. */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached: all cases above return */

            case IEM_OP_PRF_REPZ: /* F3 prefix: rd/wr fs/gs base. */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached: all cases above return */

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}
3497
3498
/** Opcode 0x0f 0xaf. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    /* IMUL Gv,Ev - two operand signed multiply.  SF/ZF/AF/PF are undefined. */
    IEMOP_MNEMONIC("imul Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
3506
3507
/** Opcode 0x0f 0xb0.  CMPXCHG Eb,Gb - not implemented yet. */
FNIEMOP_STUB(iemOp_cmpxchg_Eb_Gb);
/** Opcode 0x0f 0xb1.  CMPXCHG Ev,Gv - not implemented yet. */
FNIEMOP_STUB(iemOp_cmpxchg_Ev_Gv);
3512
3513
/**
 * Common worker for LSS/LFS/LGS: loads a far pointer from memory into the
 * segment register @a iSegReg and a general register.
 *
 * The memory operand layout is offset first, then the 16-bit selector at
 * offset 2/4/8 depending on the effective operand size.  The actual
 * segment/register loading is deferred to iemCImpl_load_SReg_Greg.
 */
FNIEMOP_DEF_1(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /* The source cannot be a register. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm);
            IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 2); /* selector follows the 16-bit offset */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm);
            IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 4); /* selector follows the 32-bit offset */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm);
            IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 8); /* selector follows the 64-bit offset */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3574
3575
/** Opcode 0x0f 0xb2. */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    /* LSS Gv,Mp - load far pointer into SS and a general register. */
    IEMOP_MNEMONIC("lss Gv,Mp");
    return FNIEMOP_CALL_1(iemOpCommonLoadSRegAndGreg, X86_SREG_SS);
}
3582
3583
/** Opcode 0x0f 0xb3. */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    /* BTR Ev,Gv - bit test and reset. */
    IEMOP_MNEMONIC("btr Ev,Gv");
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}
3590
3591
/** Opcode 0x0f 0xb4. */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    /* LFS Gv,Mp - load far pointer into FS and a general register. */
    IEMOP_MNEMONIC("lfs Gv,Mp");
    return FNIEMOP_CALL_1(iemOpCommonLoadSRegAndGreg, X86_SREG_FS);
}
3598
3599
/** Opcode 0x0f 0xb5. */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    /* LGS Gv,Mp - load far pointer into GS and a general register. */
    IEMOP_MNEMONIC("lgs Gv,Mp");
    return FNIEMOP_CALL_1(iemOpCommonLoadSRegAndGreg, X86_SREG_GS);
}
3606
3607
/** Opcode 0x0f 0xb6.
 *
 * MOVZX Gv,Eb - zero extend a byte register or memory operand into a
 * 16/32/64-bit general register.
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC("movzx Gv,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3697
3698
/** Opcode 0x0f 0xb7.
 *
 * MOVZX Gv,Ew - zero extend a word register or memory operand into a
 * 32/64-bit general register (16-bit and 32-bit operand sizes share the
 * 32-bit path).
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC("movzx Gv,Ew");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
3764
3765
/** Opcode 0x0f 0xb8.  POPCNT Gv,Ev / JMPE - not implemented yet. */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
/** Opcode 0x0f 0xb9.  Group 10 (UD1) - not implemented yet. */
FNIEMOP_STUB(iemOp_Grp10);
3770
3771
3772/** Opcode 0x0f 0xba. */
3773FNIEMOP_DEF(iemOp_Grp8)
3774{
3775 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3776 PCIEMOPBINSIZES pImpl;
3777 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3778 {
3779 case 0: case 1: case 2: case 3:
3780 return IEMOP_RAISE_INVALID_OPCODE();
3781 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC("bt Ev,Ib"); break;
3782 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC("bts Ev,Ib"); break;
3783 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC("btr Ev,Ib"); break;
3784 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC("btc Ev,Ib"); break;
3785 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3786 }
3787 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3788
3789 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3790 {
3791 /* register destination. */
3792 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
3793 IEMOP_HLP_NO_LOCK_PREFIX();
3794
3795 switch (pIemCpu->enmEffOpSize)
3796 {
3797 case IEMMODE_16BIT:
3798 IEM_MC_BEGIN(3, 0);
3799 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3800 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
3801 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3802
3803 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
3804 IEM_MC_REF_EFLAGS(pEFlags);
3805 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3806
3807 IEM_MC_ADVANCE_RIP();
3808 IEM_MC_END();
3809 return VINF_SUCCESS;
3810
3811 case IEMMODE_32BIT:
3812 IEM_MC_BEGIN(3, 0);
3813 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3814 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
3815 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3816
3817 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
3818 IEM_MC_REF_EFLAGS(pEFlags);
3819 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3820
3821 IEM_MC_ADVANCE_RIP();
3822 IEM_MC_END();
3823 return VINF_SUCCESS;
3824
3825 case IEMMODE_64BIT:
3826 IEM_MC_BEGIN(3, 0);
3827 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3828 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
3829 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3830
3831 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
3832 IEM_MC_REF_EFLAGS(pEFlags);
3833 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3834
3835 IEM_MC_ADVANCE_RIP();
3836 IEM_MC_END();
3837 return VINF_SUCCESS;
3838
3839 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3840 }
3841 }
3842 else
3843 {
3844 /* memory destination. */
3845
3846 uint32_t fAccess;
3847 if (pImpl->pfnLockedU16)
3848 fAccess = IEM_ACCESS_DATA_RW;
3849 else /* BT */
3850 {
3851 IEMOP_HLP_NO_LOCK_PREFIX();
3852 fAccess = IEM_ACCESS_DATA_R;
3853 }
3854
3855 /** @todo test negative bit offsets! */
3856 switch (pIemCpu->enmEffOpSize)
3857 {
3858 case IEMMODE_16BIT:
3859 IEM_MC_BEGIN(3, 1);
3860 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3861 IEM_MC_ARG(uint16_t, u16Src, 1);
3862 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3863 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3864
3865 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
3866 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
3867 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
3868 IEM_MC_FETCH_EFLAGS(EFlags);
3869 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
3870 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
3871 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3872 else
3873 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
3874 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
3875
3876 IEM_MC_COMMIT_EFLAGS(EFlags);
3877 IEM_MC_ADVANCE_RIP();
3878 IEM_MC_END();
3879 return VINF_SUCCESS;
3880
3881 case IEMMODE_32BIT:
3882 IEM_MC_BEGIN(3, 1);
3883 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3884 IEM_MC_ARG(uint32_t, u32Src, 1);
3885 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3886 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3887
3888 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
3889 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
3890 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
3891 IEM_MC_FETCH_EFLAGS(EFlags);
3892 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
3893 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
3894 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3895 else
3896 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
3897 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
3898
3899 IEM_MC_COMMIT_EFLAGS(EFlags);
3900 IEM_MC_ADVANCE_RIP();
3901 IEM_MC_END();
3902 return VINF_SUCCESS;
3903
3904 case IEMMODE_64BIT:
3905 IEM_MC_BEGIN(3, 1);
3906 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3907 IEM_MC_ARG(uint64_t, u64Src, 1);
3908 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3909 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3910
3911 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
3912 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
3913 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
3914 IEM_MC_FETCH_EFLAGS(EFlags);
3915 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
3916 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
3917 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3918 else
3919 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
3920 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
3921
3922 IEM_MC_COMMIT_EFLAGS(EFlags);
3923 IEM_MC_ADVANCE_RIP();
3924 IEM_MC_END();
3925 return VINF_SUCCESS;
3926
3927 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3928 }
3929 }
3930
3931}
3932
3933
/** Opcode 0x0f 0xbb. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    /* BTC Ev,Gv - bit test and complement. */
    IEMOP_MNEMONIC("btc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
3940
3941
/** Opcode 0x0f 0xbc. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    /* BSF Gv,Ev - bit scan forward; only ZF is defined. */
    IEMOP_MNEMONIC("bsf Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}
3949
3950
/** Opcode 0x0f 0xbd. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    /* BSR Gv,Ev - bit scan reverse; only ZF is defined. */
    IEMOP_MNEMONIC("bsr Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
3958
3959
/** Opcode 0x0f 0xbe.
 *
 * MOVSX Gv,Eb - sign extend a byte register or memory operand into a
 * 16/32/64-bit general register.
 */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC("movsx Gv,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4049
4050
/** Opcode 0x0f 0xbf.
 *
 * MOVSX Gv,Ew - sign extend a word register or memory operand into a
 * 32/64-bit general register (16-bit and 32-bit operand sizes share the
 * 32-bit path).
 */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC("movsx Gv,Ew");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
4116
4117
4118/** Opcode 0x0f 0xc0. */
4119FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
4120{
4121 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4122 IEMOP_MNEMONIC("xadd Eb,Gb");
4123
4124 /*
4125 * If rm is denoting a register, no more instruction bytes.
4126 */
4127 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4128 {
4129 IEMOP_HLP_NO_LOCK_PREFIX();
4130
4131 IEM_MC_BEGIN(3, 0);
4132 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
4133 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
4134 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4135
4136 IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4137 IEM_MC_REF_GREG_U8(pu8Reg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4138 IEM_MC_REF_EFLAGS(pEFlags);
4139 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
4140
4141 IEM_MC_ADVANCE_RIP();
4142 IEM_MC_END();
4143 }
4144 else
4145 {
4146 /*
4147 * We're accessing memory.
4148 */
4149 IEM_MC_BEGIN(3, 3);
4150 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
4151 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
4152 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
4153 IEM_MC_LOCAL(uint8_t, u8RegCopy);
4154 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4155
4156 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
4157 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
4158 IEM_MC_FETCH_GREG_U8(u8RegCopy, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4159 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
4160 IEM_MC_FETCH_EFLAGS(EFlags);
4161 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4162 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
4163 else
4164 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
4165
4166 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
4167 IEM_MC_COMMIT_EFLAGS(EFlags);
4168 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8RegCopy);
4169 IEM_MC_ADVANCE_RIP();
4170 IEM_MC_END();
4171 return VINF_SUCCESS;
4172 }
4173 return VINF_SUCCESS;
4174}
4175
4176
4177/** Opcode 0x0f 0xc1. */
4178FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
4179{
4180 IEMOP_MNEMONIC("xadd Ev,Gv");
4181 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4182
4183 /*
4184 * If rm is denoting a register, no more instruction bytes.
4185 */
4186 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4187 {
4188 IEMOP_HLP_NO_LOCK_PREFIX();
4189
4190 switch (pIemCpu->enmEffOpSize)
4191 {
4192 case IEMMODE_16BIT:
4193 IEM_MC_BEGIN(3, 0);
4194 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4195 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
4196 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4197
4198 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4199 IEM_MC_REF_GREG_U16(pu16Reg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4200 IEM_MC_REF_EFLAGS(pEFlags);
4201 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
4202
4203 IEM_MC_ADVANCE_RIP();
4204 IEM_MC_END();
4205 return VINF_SUCCESS;
4206
4207 case IEMMODE_32BIT:
4208 IEM_MC_BEGIN(3, 0);
4209 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4210 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
4211 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4212
4213 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4214 IEM_MC_REF_GREG_U32(pu32Reg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4215 IEM_MC_REF_EFLAGS(pEFlags);
4216 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
4217
4218 IEM_MC_ADVANCE_RIP();
4219 IEM_MC_END();
4220 return VINF_SUCCESS;
4221
4222 case IEMMODE_64BIT:
4223 IEM_MC_BEGIN(3, 0);
4224 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4225 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
4226 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4227
4228 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4229 IEM_MC_REF_GREG_U64(pu64Reg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4230 IEM_MC_REF_EFLAGS(pEFlags);
4231 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
4232
4233 IEM_MC_ADVANCE_RIP();
4234 IEM_MC_END();
4235 return VINF_SUCCESS;
4236
4237 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4238 }
4239 }
4240 else
4241 {
4242 /*
4243 * We're accessing memory.
4244 */
4245 switch (pIemCpu->enmEffOpSize)
4246 {
4247 case IEMMODE_16BIT:
4248 IEM_MC_BEGIN(3, 3);
4249 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4250 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
4251 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
4252 IEM_MC_LOCAL(uint16_t, u16RegCopy);
4253 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4254
4255 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
4256 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
4257 IEM_MC_FETCH_GREG_U16(u16RegCopy, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4258 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
4259 IEM_MC_FETCH_EFLAGS(EFlags);
4260 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4261 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
4262 else
4263 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
4264
4265 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4266 IEM_MC_COMMIT_EFLAGS(EFlags);
4267 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16RegCopy);
4268 IEM_MC_ADVANCE_RIP();
4269 IEM_MC_END();
4270 return VINF_SUCCESS;
4271
4272 case IEMMODE_32BIT:
4273 IEM_MC_BEGIN(3, 3);
4274 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4275 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
4276 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
4277 IEM_MC_LOCAL(uint32_t, u32RegCopy);
4278 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4279
4280 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
4281 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
4282 IEM_MC_FETCH_GREG_U32(u32RegCopy, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4283 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
4284 IEM_MC_FETCH_EFLAGS(EFlags);
4285 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4286 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
4287 else
4288 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
4289
4290 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4291 IEM_MC_COMMIT_EFLAGS(EFlags);
4292 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32RegCopy);
4293 IEM_MC_ADVANCE_RIP();
4294 IEM_MC_END();
4295 return VINF_SUCCESS;
4296
4297 case IEMMODE_64BIT:
4298 IEM_MC_BEGIN(3, 3);
4299 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4300 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
4301 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
4302 IEM_MC_LOCAL(uint64_t, u64RegCopy);
4303 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4304
4305 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
4306 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
4307 IEM_MC_FETCH_GREG_U64(u64RegCopy, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4308 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
4309 IEM_MC_FETCH_EFLAGS(EFlags);
4310 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4311 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
4312 else
4313 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
4314
4315 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4316 IEM_MC_COMMIT_EFLAGS(EFlags);
4317 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64RegCopy);
4318 IEM_MC_ADVANCE_RIP();
4319 IEM_MC_END();
4320 return VINF_SUCCESS;
4321
4322 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4323 }
4324 }
4325}
4326
/* Not yet implemented (FNIEMOP_STUB placeholders): */
/** Opcode 0x0f 0xc2. */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);
/** Opcode 0x0f 0xc3. */
FNIEMOP_STUB(iemOp_movnti_My_Gy);
/** Opcode 0x0f 0xc4. */
FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);
/** Opcode 0x0f 0xc5. */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);
/** Opcode 0x0f 0xc6. */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
/** Opcode 0x0f 0xc7. */
FNIEMOP_STUB(iemOp_Grp9);
4339
4340
/**
 * Common 'bswap register' helper.
 *
 * Byte-swaps the given general purpose register according to the effective
 * operand size.
 *
 * @param   iReg    Index of the register to swap, including any REX
 *                  extension bit already merged in by the caller.
 */
FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* NOTE(review): 16-bit BSWAP behavior - this goes through a
               32-bit register reference so the upper dword is preserved,
               per the comment below; presumably matching observed CPU
               behavior for the (architecturally undefined) 16-bit form. */
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            /* 32-bit operation: the high dword is explicitly cleared. */
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4380
4381
4382/** Opcode 0x0f 0xc8. */
4383FNIEMOP_DEF(iemOp_bswap_rAX_r8)
4384{
4385 IEMOP_MNEMONIC("bswap rAX/r8");
4386 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pIemCpu->uRexReg);
4387}
4388
4389
4390/** Opcode 0x0f 0xc9. */
4391FNIEMOP_DEF(iemOp_bswap_rCX_r9)
4392{
4393 IEMOP_MNEMONIC("bswap rCX/r9");
4394 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pIemCpu->uRexReg);
4395}
4396
4397
4398/** Opcode 0x0f 0xca. */
4399FNIEMOP_DEF(iemOp_bswap_rDX_r10)
4400{
4401 IEMOP_MNEMONIC("bswap rDX/r9");
4402 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pIemCpu->uRexReg);
4403}
4404
4405
4406/** Opcode 0x0f 0xcb. */
4407FNIEMOP_DEF(iemOp_bswap_rBX_r11)
4408{
4409 IEMOP_MNEMONIC("bswap rBX/r9");
4410 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pIemCpu->uRexReg);
4411}
4412
4413
4414/** Opcode 0x0f 0xcc. */
4415FNIEMOP_DEF(iemOp_bswap_rSP_r12)
4416{
4417 IEMOP_MNEMONIC("bswap rSP/r12");
4418 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pIemCpu->uRexReg);
4419}
4420
4421
4422/** Opcode 0x0f 0xcd. */
4423FNIEMOP_DEF(iemOp_bswap_rBP_r13)
4424{
4425 IEMOP_MNEMONIC("bswap rBP/r13");
4426 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pIemCpu->uRexReg);
4427}
4428
4429
4430/** Opcode 0x0f 0xce. */
4431FNIEMOP_DEF(iemOp_bswap_rSI_r14)
4432{
4433 IEMOP_MNEMONIC("bswap rSI/r14");
4434 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pIemCpu->uRexReg);
4435}
4436
4437
4438/** Opcode 0x0f 0xcf. */
4439FNIEMOP_DEF(iemOp_bswap_rDI_r15)
4440{
4441 IEMOP_MNEMONIC("bswap rDI/r15");
4442 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pIemCpu->uRexReg);
4443}
4444
4445
4446
/* MMX/SSE packed-integer group 0x0f 0xd0..0xfe - not yet implemented
   (FNIEMOP_STUB placeholders): */
/** Opcode 0x0f 0xd0. */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
/** Opcode 0x0f 0xd1. */
FNIEMOP_STUB(iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq);
/** Opcode 0x0f 0xd2. */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
/** Opcode 0x0f 0xd3. */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
/** Opcode 0x0f 0xd4. */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
/** Opcode 0x0f 0xd5. */
FNIEMOP_STUB(iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq);
/** Opcode 0x0f 0xd6. */
FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq);
/** Opcode 0x0f 0xd7. */
FNIEMOP_STUB(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq);
/** Opcode 0x0f 0xd8. */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
/** Opcode 0x0f 0xd9. */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
/** Opcode 0x0f 0xda. */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
/** Opcode 0x0f 0xdb. */
FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
/** Opcode 0x0f 0xdc. */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
/** Opcode 0x0f 0xdd. */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
/** Opcode 0x0f 0xde. */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq);
/** Opcode 0x0f 0xdf. */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
/** Opcode 0x0f 0xe0. */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
/** Opcode 0x0f 0xe1. */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
/** Opcode 0x0f 0xe2. */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
/** Opcode 0x0f 0xe3. */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
/** Opcode 0x0f 0xe4. */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
/** Opcode 0x0f 0xe5. */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
/** Opcode 0x0f 0xe6. */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
/** Opcode 0x0f 0xe7. */
FNIEMOP_STUB(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq);
/** Opcode 0x0f 0xe8. */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
/** Opcode 0x0f 0xe9. */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
/** Opcode 0x0f 0xea. */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
/** Opcode 0x0f 0xeb. */
FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
/** Opcode 0x0f 0xec. */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
/** Opcode 0x0f 0xed. */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
/** Opcode 0x0f 0xee. */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
/** Opcode 0x0f 0xef. */
FNIEMOP_STUB(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq);
/** Opcode 0x0f 0xf0. */
FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
/** Opcode 0x0f 0xf1. */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq);
/** Opcode 0x0f 0xf2. */
FNIEMOP_STUB(iemOp_psld_Pq_Qq__pslld_Vdq_Wdq);
/** Opcode 0x0f 0xf3. */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq);
/** Opcode 0x0f 0xf4. */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
/** Opcode 0x0f 0xf5. */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
/** Opcode 0x0f 0xf6. */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
/** Opcode 0x0f 0xf7. */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
/** Opcode 0x0f 0xf8. */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq);
/** Opcode 0x0f 0xf9. */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
/** Opcode 0x0f 0xfa. */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
/** Opcode 0x0f 0xfb. */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq);
/** Opcode 0x0f 0xfc. */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
/** Opcode 0x0f 0xfd. */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
/** Opcode 0x0f 0xfe. */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
4541
4542
/**
 * The two byte (0x0f prefixed) opcode dispatch table, indexed by the second
 * opcode byte.  Entries are either real decoders, FNIEMOP_STUB placeholders,
 * or iemOp_Invalid.
 */
const PFNIEMOP g_apfnTwoByteMap[256] =
{
    /* 0x00 */  iemOp_Grp6,             iemOp_Grp7,             iemOp_lar_Gv_Ew,        iemOp_lsl_Gv_Ew,
    /* 0x04 */  iemOp_Invalid,          iemOp_syscall,          iemOp_clts,             iemOp_sysret,
    /* 0x08 */  iemOp_invd,             iemOp_wbinvd,           iemOp_Invalid,          iemOp_ud2,
    /* 0x0c */  iemOp_Invalid,          iemOp_nop_Ev_GrpP,      iemOp_femms,            iemOp_3Dnow,
    /* 0x10 */  iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
    /* 0x11 */  iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
    /* 0x12 */  iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
    /* 0x13 */  iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
    /* 0x14 */  iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
    /* 0x15 */  iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
    /* 0x16 */  iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
    /* 0x17 */  iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
    /* 0x18 */  iemOp_prefetch_Grp16,   iemOp_nop_Ev,           iemOp_nop_Ev,           iemOp_nop_Ev,
    /* 0x1c */  iemOp_nop_Ev,           iemOp_nop_Ev,           iemOp_nop_Ev,           iemOp_nop_Ev,
    /* 0x20 */  iemOp_mov_Rd_Cd,        iemOp_mov_Rd_Dd,        iemOp_mov_Cd_Rd,        iemOp_mov_Dd_Rd,
    /* 0x24 */  iemOp_mov_Rd_Td,        iemOp_Invalid,          iemOp_mov_Td_Rd,        iemOp_Invalid,
    /* 0x28 */  iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
    /* 0x29 */  iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
    /* 0x2a */  iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
    /* 0x2b */  iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
    /* 0x2c */  iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
    /* 0x2d */  iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
    /* 0x2e */  iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
    /* 0x2f */  iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
    /* 0x30 */  iemOp_wrmsr,            iemOp_rdtsc,            iemOp_rdmsr,            iemOp_rdpmc,
    /* 0x34 */  iemOp_sysenter,         iemOp_sysexit,          iemOp_Invalid,          iemOp_getsec,
    /* 0x38 */  iemOp_3byte_Esc_A4,     iemOp_Invalid,          iemOp_3byte_Esc_A5,     iemOp_Invalid,
    /* 0x3c */  iemOp_movnti_Gv_Ev/*?*/,iemOp_Invalid,          iemOp_Invalid,          iemOp_Invalid,
    /* 0x40 */  iemOp_cmovo_Gv_Ev,      iemOp_cmovno_Gv_Ev,     iemOp_cmovc_Gv_Ev,      iemOp_cmovnc_Gv_Ev,
    /* 0x44 */  iemOp_cmove_Gv_Ev,      iemOp_cmovne_Gv_Ev,     iemOp_cmovbe_Gv_Ev,     iemOp_cmovnbe_Gv_Ev,
    /* 0x48 */  iemOp_cmovs_Gv_Ev,      iemOp_cmovns_Gv_Ev,     iemOp_cmovp_Gv_Ev,      iemOp_cmovnp_Gv_Ev,
    /* 0x4c */  iemOp_cmovl_Gv_Ev,      iemOp_cmovnl_Gv_Ev,     iemOp_cmovle_Gv_Ev,     iemOp_cmovnle_Gv_Ev,
    /* 0x50 */  iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
    /* 0x51 */  iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
    /* 0x52 */  iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
    /* 0x53 */  iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
    /* 0x54 */  iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
    /* 0x55 */  iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
    /* 0x56 */  iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
    /* 0x57 */  iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
    /* 0x58 */  iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
    /* 0x59 */  iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
    /* 0x5a */  iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
    /* 0x5b */  iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
    /* 0x5c */  iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
    /* 0x5d */  iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
    /* 0x5e */  iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
    /* 0x5f */  iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
    /* 0x60 */  iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
    /* 0x61 */  iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
    /* 0x62 */  iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
    /* 0x63 */  iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
    /* 0x64 */  iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
    /* 0x65 */  iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
    /* 0x66 */  iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
    /* 0x67 */  iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
    /* 0x68 */  iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
    /* 0x69 */  iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
    /* 0x6a */  iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
    /* 0x6b */  iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
    /* 0x6c */  iemOp_punpcklqdq_Vdq_Wdq,
    /* 0x6d */  iemOp_punpckhqdq_Vdq_Wdq,
    /* 0x6e */  iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
    /* 0x6f */  iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
    /* 0x70 */  iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
    /* 0x71 */  iemOp_Grp12,
    /* 0x72 */  iemOp_Grp13,
    /* 0x73 */  iemOp_Grp14,
    /* 0x74 */  iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
    /* 0x75 */  iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
    /* 0x76 */  iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
    /* 0x77 */  iemOp_emms,
    /* 0x78 */  iemOp_vmread,           iemOp_vmwrite,          iemOp_Invalid,          iemOp_Invalid,
    /* 0x7c */  iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
    /* 0x7d */  iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
    /* 0x7e */  iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
    /* 0x7f */  iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
    /* 0x80 */  iemOp_jo_Jv,            iemOp_jno_Jv,           iemOp_jc_Jv,            iemOp_jnc_Jv,
    /* 0x84 */  iemOp_je_Jv,            iemOp_jne_Jv,           iemOp_jbe_Jv,           iemOp_jnbe_Jv,
    /* 0x88 */  iemOp_js_Jv,            iemOp_jns_Jv,           iemOp_jp_Jv,            iemOp_jnp_Jv,
    /* 0x8c */  iemOp_jl_Jv,            iemOp_jnl_Jv,           iemOp_jle_Jv,           iemOp_jnle_Jv,
    /* 0x90 */  iemOp_seto_Eb,          iemOp_setno_Eb,         iemOp_setc_Eb,          iemOp_setnc_Eb,
    /* 0x94 */  iemOp_sete_Eb,          iemOp_setne_Eb,         iemOp_setbe_Eb,         iemOp_setnbe_Eb,
    /* 0x98 */  iemOp_sets_Eb,          iemOp_setns_Eb,         iemOp_setp_Eb,          iemOp_setnp_Eb,
    /* 0x9c */  iemOp_setl_Eb,          iemOp_setnl_Eb,         iemOp_setle_Eb,         iemOp_setnle_Eb,
    /* 0xa0 */  iemOp_push_fs,          iemOp_pop_fs,           iemOp_cpuid,            iemOp_bt_Ev_Gv,
    /* 0xa4 */  iemOp_shld_Ev_Gv_Ib,    iemOp_shld_Ev_Gv_CL,    iemOp_Invalid,          iemOp_Invalid,
    /* 0xa8 */  iemOp_push_gs,          iemOp_pop_gs,           iemOp_rsm,              iemOp_bts_Ev_Gv,
    /* 0xac */  iemOp_shrd_Ev_Gv_Ib,    iemOp_shrd_Ev_Gv_CL,    iemOp_Grp15,            iemOp_imul_Gv_Ev,
    /* 0xb0 */  iemOp_cmpxchg_Eb_Gb,    iemOp_cmpxchg_Ev_Gv,    iemOp_lss_Gv_Mp,        iemOp_btr_Ev_Gv,
    /* 0xb4 */  iemOp_lfs_Gv_Mp,        iemOp_lgs_Gv_Mp,        iemOp_movzx_Gv_Eb,      iemOp_movzx_Gv_Ew,
    /* 0xb8 */  iemOp_popcnt_Gv_Ev_jmpe,iemOp_Grp10,            iemOp_Grp8,             iemOp_btc_Ev_Gv,
    /* 0xbc */  iemOp_bsf_Gv_Ev,        iemOp_bsr_Gv_Ev,        iemOp_movsx_Gv_Eb,      iemOp_movsx_Gv_Ew,
    /* 0xc0 */  iemOp_xadd_Eb_Gb,
    /* 0xc1 */  iemOp_xadd_Ev_Gv,
    /* 0xc2 */  iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */  iemOp_movnti_My_Gy,
    /* 0xc4 */  iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
    /* 0xc5 */  iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
    /* 0xc6 */  iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
    /* 0xc7 */  iemOp_Grp9,
    /* 0xc8 */  iemOp_bswap_rAX_r8,     iemOp_bswap_rCX_r9,     iemOp_bswap_rDX_r10,    iemOp_bswap_rBX_r11,
    /* 0xcc */  iemOp_bswap_rSP_r12,    iemOp_bswap_rBP_r13,    iemOp_bswap_rSI_r14,    iemOp_bswap_rDI_r15,
    /* 0xd0 */  iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
    /* 0xd1 */  iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq,
    /* 0xd2 */  iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
    /* 0xd3 */  iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
    /* 0xd4 */  iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
    /* 0xd5 */  iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq,
    /* 0xd6 */  iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
    /* 0xd7 */  iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
    /* 0xd8 */  iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
    /* 0xd9 */  iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
    /* 0xda */  iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
    /* 0xdb */  iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
    /* 0xdc */  iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
    /* 0xdd */  iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
    /* 0xde */  iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq,
    /* 0xdf */  iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
    /* 0xe0 */  iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
    /* 0xe1 */  iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
    /* 0xe2 */  iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
    /* 0xe3 */  iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
    /* 0xe4 */  iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
    /* 0xe5 */  iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
    /* 0xe6 */  iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
    /* 0xe7 */  iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
    /* 0xe8 */  iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
    /* 0xe9 */  iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
    /* 0xea */  iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
    /* 0xeb */  iemOp_por_Pq_Qq__por_Vdq_Wdq,
    /* 0xec */  iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
    /* 0xed */  iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
    /* 0xee */  iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
    /* 0xef */  iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
    /* 0xf0 */  iemOp_lddqu_Vdq_Mdq,
    /* 0xf1 */  iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq,
    /* 0xf2 */  iemOp_psld_Pq_Qq__pslld_Vdq_Wdq,
    /* 0xf3 */  iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq,
    /* 0xf4 */  iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
    /* 0xf5 */  iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
    /* 0xf6 */  iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
    /* 0xf7 */  iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
    /* 0xf8 */  iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
    /* 0xf9 */  iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
    /* 0xfa */  iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
    /* 0xfb */  iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq,
    /* 0xfc */  iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
    /* 0xfd */  iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
    /* 0xfe */  iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
    /* 0xff */  iemOp_Invalid
};
4697
4698/** @} */
4699
4700
4701/** @name One byte opcodes.
4702 *
4703 * @{
4704 */
4705
/** Opcode 0x00 - add Eb,Gb (byte add, r/m destination). */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC("add Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}
4712
4713
/** Opcode 0x01 - add Ev,Gv (word/dword/qword add, r/m destination). */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC("add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}
4720
4721
/** Opcode 0x02 - add Gb,Eb (byte add, reg destination). */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC("add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}
4728
4729
/** Opcode 0x03 - add Gv,Ev (word/dword/qword add, reg destination). */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC("add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}
4736
4737
/** Opcode 0x04 - add al,Ib (immediate byte to AL). */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC("add al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}
4744
4745
/** Opcode 0x05 - add rAX,Iz (immediate word/dword to rAX). */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC("add rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
4752
4753
/** Opcode 0x06 - push es. */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC("push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
4760
4761
/** Opcode 0x07 - pop es (rejected in 64-bit mode, see IEMOP_HLP_NO_64BIT). */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC("pop es");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pIemCpu->enmEffOpSize);
}
4770
4771
/** Opcode 0x08 - or Eb,Gb.  AF is undefined after OR. */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC("or  Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
4779
4780
4781/** Opcode 0x09. */
4782FNIEMOP_DEF(iemOp_or_Ev_Gv)
4783{
4784 IEMOP_MNEMONIC("or Ev,Gv ");
4785 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
4786 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
4787}
4788
4789
/** Opcode 0x0a - or Gb,Eb.  AF is undefined after OR. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC("or  Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}
4797
4798
/** Opcode 0x0b - or Gv,Ev.  AF is undefined after OR. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC("or  Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}
4806
4807
/** Opcode 0x0c - or al,Ib.  AF is undefined after OR. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC("or  al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}
4815
4816
/** Opcode 0x0d - or rAX,Iz.  AF is undefined after OR. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC("or  rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
4824
4825
/** Opcode 0x0e - push cs. */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC("push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
4832
4833
/** Opcode 0x0f - escape to the two byte opcode map. */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    /* Fetch the second opcode byte and dispatch via the two byte map. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
}
4840
/** Opcode 0x10 - adc Eb,Gb (add with carry). */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC("adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}
4847
4848
/** Opcode 0x11 - adc Ev,Gv (add with carry). */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC("adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}
4855
4856
/** Opcode 0x12 - adc Gb,Eb (add with carry). */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC("adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}
4863
4864
/** Opcode 0x13 - adc Gv,Ev (add with carry). */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC("adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}
4871
4872
/** Opcode 0x14 - adc al,Ib (add with carry). */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC("adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}
4879
4880
/** Opcode 0x15 - adc rAX,Iz (add with carry). */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC("adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
4887
4888
/** Opcode 0x16 - push ss. */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC("push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
4895
4896
/** Opcode 0x17 - pop ss (rejected in 64-bit mode). */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC("pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pIemCpu->enmEffOpSize);
}
4905
4906
/** Opcode 0x18 - sbb Eb,Gb (subtract with borrow). */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC("sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}
4913
4914
/** Opcode 0x19 - sbb Ev,Gv (subtract with borrow). */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC("sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}
4921
4922
/** Opcode 0x1a - sbb Gb,Eb (subtract with borrow). */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC("sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}
4929
4930
/** Opcode 0x1b - sbb Gv,Ev (subtract with borrow). */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC("sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}
4937
4938
/** Opcode 0x1c - sbb al,Ib (subtract with borrow). */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC("sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}
4945
4946
/** Opcode 0x1d - sbb rAX,Iz (subtract with borrow). */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC("sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
4953
4954
/** Opcode 0x1e - push ds. */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC("push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
4961
4962
/** Opcode 0x1f - pop ds (rejected in 64-bit mode). */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC("pop ds");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pIemCpu->enmEffOpSize);
}
4971
4972
/** Opcode 0x20 - and Eb,Gb.  AF is undefined after AND. */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC("and Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}
4980
4981
/** Opcode 0x21 - and Ev,Gv.  AF is undefined after AND. */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC("and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}
4989
4990
4991/** Opcode 0x22. */
4992FNIEMOP_DEF(iemOp_and_Gb_Eb)
4993{
4994 IEMOP_MNEMONIC("and Gb,Eb");
4995 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
4996 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
4997}
4998
4999
5000/** Opcode 0x23. */
5001FNIEMOP_DEF(iemOp_and_Gv_Ev)
5002{
5003 IEMOP_MNEMONIC("and Gv,Ev");
5004 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5005 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
5006}
5007
5008
5009/** Opcode 0x24. */
5010FNIEMOP_DEF(iemOp_and_Al_Ib)
5011{
5012 IEMOP_MNEMONIC("and al,Ib");
5013 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5014 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
5015}
5016
5017
5018/** Opcode 0x25. */
5019FNIEMOP_DEF(iemOp_and_eAX_Iz)
5020{
5021 IEMOP_MNEMONIC("and rAX,Iz");
5022 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5023 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
5024}
5025
5026
5027/** Opcode 0x26. */
5028FNIEMOP_DEF(iemOp_seg_ES)
5029{
5030 pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_ES;
5031 pIemCpu->iEffSeg = X86_SREG_ES;
5032
5033 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
5034 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
5035}
5036
5037
5038/** Opcode 0x27. */
5039FNIEMOP_STUB(iemOp_daa);
5040
5041
/** Opcode 0x28 - SUB Eb,Gb: byte subtract, reg/mem destination. */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC("sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/** Opcode 0x29 - SUB Ev,Gv: word/dword/qword subtract, reg/mem destination. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC("sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/** Opcode 0x2a - SUB Gb,Eb: byte subtract, register destination. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC("sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2b - SUB Gv,Ev: word/dword/qword subtract, register destination. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC("sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2c - SUB AL,Ib: immediate byte subtract from AL. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC("sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/** Opcode 0x2d - SUB rAX,Iz: immediate subtract from AX/EAX/RAX. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC("sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}


/** Opcode 0x2e - CS segment override prefix: record it and decode the next byte. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_CS;
    pIemCpu->iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x2f - DAS; not yet implemented. */
FNIEMOP_STUB(iemOp_das);
5103
5104
/** Opcode 0x30 - XOR Eb,Gb: byte XOR, reg/mem destination (AF undefined). */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC("xor Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after XOR. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/** Opcode 0x31 - XOR Ev,Gv: word/dword/qword XOR, reg/mem destination. */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC("xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/** Opcode 0x32 - XOR Gb,Eb: byte XOR, register destination. */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC("xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/** Opcode 0x33 - XOR Gv,Ev: word/dword/qword XOR, register destination. */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC("xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}


/** Opcode 0x34 - XOR AL,Ib: immediate byte XOR into AL. */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC("xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}


/** Opcode 0x35 - XOR rAX,Iz: immediate XOR into AX/EAX/RAX. */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC("xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}


/** Opcode 0x36 - SS segment override prefix: record it and decode the next byte. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_SS;
    pIemCpu->iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x37 - AAA; not yet implemented. */
FNIEMOP_STUB(iemOp_aaa);
5172
5173
/** Opcode 0x38 - CMP Eb,Gb: byte compare (subtract discarding the result). */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC("cmp Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}


/** Opcode 0x39 - CMP Ev,Gv: word/dword/qword compare. */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC("cmp Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}


/** Opcode 0x3a - CMP Gb,Eb: byte compare, register first operand.
 * NOTE(review): unlike 0x38/0x39 there is no lock-prefix check here - confirm
 * whether the common worker rejects it. */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC("cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3b - CMP Gv,Ev: word/dword/qword compare, register first operand. */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3c - CMP AL,Ib: immediate byte compare with AL. */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC("cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}


/** Opcode 0x3d - CMP rAX,Iz: immediate compare with AX/EAX/RAX. */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC("cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}


/** Opcode 0x3e - DS segment override prefix: record it and decode the next byte. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_DS;
    pIemCpu->iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x3f - AAS; not yet implemented. */
FNIEMOP_STUB(iemOp_aas);
5237
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * Applies the given unary operation to general register @a iReg at the
 * current effective operand size, updating EFLAGS through the worker.
 *
 * @param   pImpl   The operation implementation table (U16/U32/U64 workers).
 * @param   iReg    The general register index (X86_GREG_XXX).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* Not reachable with a valid enmEffOpSize; quiet compilers lacking case analysis. */
    return VINF_SUCCESS;
}
5281
5282
/** Opcode 0x40 - INC eAX; REX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}


/** Opcode 0x41 - INC eCX; REX.B prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pIemCpu->uRexB = 1 << 3; /* rm/base register index bit 3. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}


/** Opcode 0x42 - INC eDX; REX.X prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pIemCpu->uRexIndex = 1 << 3; /* SIB index register bit 3. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}



/** Opcode 0x43 - INC eBX; REX.BX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}


/** Opcode 0x44 - INC eSP; REX.R prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pIemCpu->uRexReg = 1 << 3; /* ModRM reg field bit 3. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}


/** Opcode 0x45 - INC eBP; REX.RB prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}


/** Opcode 0x46 - INC eSI; REX.RX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}


/** Opcode 0x47 - INC eDI; REX.RBX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
5446
5447
/** Opcode 0x48 - DEC eAX; REX.W prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pIemCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}


/** Opcode 0x49 - DEC eCX; REX.WB prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}


/** Opcode 0x4a - DEC eDX; REX.WX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}


/** Opcode 0x4b - DEC eBX; REX.WBX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}


/** Opcode 0x4c - DEC eSP; REX.WR prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}


/** Opcode 0x4d - DEC eBP; REX.WRB prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB   = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}


/** Opcode 0x4e - DEC eSI; REX.WRX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}


/** Opcode 0x4f - DEC eDI; REX.WRBX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
5618
5619
/**
 * Common 'push register' helper.
 *
 * Pushes general register @a iReg at the current effective operand size.
 * In 64-bit mode REX.B extends the register index and the default operand
 * size is forced to 64-bit (66h still selects 16-bit); 32-bit pushes are
 * not encodable there.
 *
 * @param   iReg    The general register index (X86_GREG_XXX).
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pIemCpu->uRexB;
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
5665
5666
/** Opcode 0x50 - PUSH rAX (REX.B selects r8 in 64-bit mode, handled by the helper). */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC("push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}


/** Opcode 0x51 - PUSH rCX. */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC("push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}


/** Opcode 0x52 - PUSH rDX. */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC("push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}


/** Opcode 0x53 - PUSH rBX. */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC("push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}


/** Opcode 0x54 - PUSH rSP. */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC("push rSP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}


/** Opcode 0x55 - PUSH rBP. */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC("push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}


/** Opcode 0x56 - PUSH rSI. */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC("push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}


/** Opcode 0x57 - PUSH rDI. */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC("push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
5729
5730
5731/**
5732 * Common 'pop register' helper.
5733 */
5734FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
5735{
5736 IEMOP_HLP_NO_LOCK_PREFIX();
5737 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
5738 {
5739 iReg |= pIemCpu->uRexB;
5740 pIemCpu->enmDefOpSize = IEMMODE_64BIT;
5741 pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
5742 }
5743
5744/** @todo How does this code handle iReg==X86_GREG_xSP. How does a real CPU
5745 * handle it, for that matter (Intel pseudo code hints that the popped
5746 * value is incremented by the stack item size.) Test it, both encodings
5747 * and all three register sizes. */
5748 switch (pIemCpu->enmEffOpSize)
5749 {
5750 case IEMMODE_16BIT:
5751 IEM_MC_BEGIN(0, 1);
5752 IEM_MC_LOCAL(uint16_t, *pu16Dst);
5753 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
5754 IEM_MC_POP_U16(pu16Dst);
5755 IEM_MC_ADVANCE_RIP();
5756 IEM_MC_END();
5757 break;
5758
5759 case IEMMODE_32BIT:
5760 IEM_MC_BEGIN(0, 1);
5761 IEM_MC_LOCAL(uint32_t, *pu32Dst);
5762 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
5763 IEM_MC_POP_U32(pu32Dst);
5764 IEM_MC_ADVANCE_RIP();
5765 IEM_MC_END();
5766 break;
5767
5768 case IEMMODE_64BIT:
5769 IEM_MC_BEGIN(0, 1);
5770 IEM_MC_LOCAL(uint64_t, *pu64Dst);
5771 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
5772 IEM_MC_POP_U64(pu64Dst);
5773 IEM_MC_ADVANCE_RIP();
5774 IEM_MC_END();
5775 break;
5776 }
5777
5778 return VINF_SUCCESS;
5779}
5780
5781
/** Opcode 0x58 - POP rAX (REX.B selects r8 in 64-bit mode, handled by the helper). */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC("pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}


/** Opcode 0x59 - POP rCX. */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC("pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}


/** Opcode 0x5a - POP rDX. */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC("pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}


/** Opcode 0x5b - POP rBX. */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC("pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}


/** Opcode 0x5c - POP rSP (special-case semantics discussed in the helper's todo). */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC("pop rSP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
}


/** Opcode 0x5d - POP rBP. */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC("pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}


/** Opcode 0x5e - POP rSI. */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC("pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}


/** Opcode 0x5f - POP rDI. */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC("pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
5844
5845
/** Opcode 0x60 - PUSHA/PUSHAD; not valid in 64-bit mode. */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC("pusha");
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT); /* 64-bit already excluded above. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}


/** Opcode 0x61 - POPA/POPAD; not valid in 64-bit mode. */
FNIEMOP_DEF(iemOp_popa)
{
    IEMOP_MNEMONIC("popa");
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
}


/** Opcode 0x62 - BOUND Gv,Ma; not yet implemented. */
FNIEMOP_STUB(iemOp_bound_Gv_Ma);
/** Opcode 0x63 - ARPL Ew,Gw; not yet implemented. */
FNIEMOP_STUB(iemOp_arpl_Ew_Gw);
5874
5875
/** Opcode 0x64 - FS segment override prefix: record it and decode the next byte. */
FNIEMOP_DEF(iemOp_seg_FS)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_FS;
    pIemCpu->iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x65 - GS segment override prefix: record it and decode the next byte. */
FNIEMOP_DEF(iemOp_seg_GS)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_GS;
    pIemCpu->iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x66 - operand-size override prefix: recalc the effective size and continue. */
FNIEMOP_DEF(iemOp_op_size)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pIemCpu);

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x67 - address-size override prefix: toggle the effective address mode. */
FNIEMOP_DEF(iemOp_addr_size)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pIemCpu->enmDefAddrMode)
    {
        case IEMMODE_16BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pIemCpu->enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break; /* 67h in long mode selects 32-bit addressing. */
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
5924
5925
/** Opcode 0x68 - PUSH Iz: push an immediate word/dword (sign-extended dword in 64-bit mode). */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC("push Iz");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Stack pushes default to 64-bit operand size in long mode. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Immediate is a dword, sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5969
5970
/** Opcode 0x69 - IMUL Gv,Ev,Iz: three-operand signed multiply with full immediate.
 *
 * The product is computed into a local temporary via a reference and only the
 * truncated low half is written back to the destination register; SF/ZF/AF/PF
 * are architecturally undefined afterwards. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Immediate is a dword, sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_INTERNAL_ERROR_3);
}
6117
6118
/** Opcode 0x6a - PUSH Ib: push a sign-extended byte immediate at the
 * effective operand size (defaults to 64-bit in long mode). */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC("push Ib");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm); /* sign-extended by the int8_t -> uint16_t conversion */
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
6144
6145
/**
 * Opcode 0x6b - IMUL Gv,Ev,Ib.
 * Three-operand signed multiply with a sign-extended 8-bit immediate.
 * SF/ZF/AF/PF are left undefined by the hardware, hence the
 * verification-mode annotation below.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend Ib to 16 bits */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                /* Multiply into a local, then store to the Gv register. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend Ib to 32 bits */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend Ib to 64 bits */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_INTERNAL_ERROR_3); /* all operand sizes handled above */
}
6282
6283
/**
 * Opcode 0x6c - INSB.
 * Byte string input from port DX; dispatches to a C implementation selected
 * by REP prefix and effective address size.
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ)) /* REPNZ acts like REP for INS */
    {
        IEMOP_MNEMONIC("rep ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op8_addr16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op8_addr32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op8_addr64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op8_addr16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op8_addr32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op8_addr64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6311
6312
/**
 * Opcode 0x6d - INSW/INSD.
 * Word/dword string input from port DX.  Note that a 64-bit operand size is
 * treated as 32-bit (there is no INSQ), which is why IEMMODE_64BIT falls
 * through to the op32 cases.
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op16_addr16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op16_addr32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op16_addr64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* no INSQ; treated as 32-bit operand size */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op32_addr16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op32_addr32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op32_addr64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op16_addr16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op16_addr32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op16_addr64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* no INSQ; treated as 32-bit operand size */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op32_addr16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op32_addr32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op32_addr64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6372
6373
6374/** Opcode 0x6e. */
6375FNIEMOP_DEF(iemOp_outsb_Yb_DX)
6376{
6377 IEMOP_HLP_NO_LOCK_PREFIX();
6378 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6379 {
6380 IEMOP_MNEMONIC("rep out DX,Yb");
6381 switch (pIemCpu->enmEffAddrMode)
6382 {
6383 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op8_addr16, pIemCpu->iEffSeg);
6384 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op8_addr32, pIemCpu->iEffSeg);
6385 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op8_addr64, pIemCpu->iEffSeg);
6386 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6387 }
6388 }
6389 else
6390 {
6391 IEMOP_MNEMONIC("out DX,Yb");
6392 switch (pIemCpu->enmEffAddrMode)
6393 {
6394 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op8_addr16, pIemCpu->iEffSeg);
6395 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op8_addr32, pIemCpu->iEffSeg);
6396 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op8_addr64, pIemCpu->iEffSeg);
6397 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6398 }
6399 }
6400}
6401
6402
/**
 * Opcode 0x6f - OUTSW/OUTSD.
 * Word/dword string output to port DX.  A 64-bit operand size is treated as
 * 32-bit (there is no OUTSQ), hence IEMMODE_64BIT falling through to the
 * op32 cases.  The effective segment is passed down since OUTS honours
 * segment overrides on the source.
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* no OUTSQ; treated as 32-bit operand size */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* no OUTSQ; treated as 32-bit operand size */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6462
6463
/** Opcode 0x70 - JO rel8: jump short if the overflow flag (OF) is set. */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC("jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
6481
6482
/** Opcode 0x71 - JNO rel8: jump short if the overflow flag (OF) is clear. */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC("jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();       /* OF set: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* OF clear: take branch */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
6500
/** Opcode 0x72 - JC/JB/JNAE rel8: jump short if the carry flag (CF) is set. */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC("jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
6518
6519
/** Opcode 0x73 - JNC/JNB/JAE rel8: jump short if the carry flag (CF) is clear. */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC("jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();       /* CF set: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* CF clear: take branch */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
6537
6538
/** Opcode 0x74 - JE/JZ rel8: jump short if the zero flag (ZF) is set. */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC("je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
6556
6557
/** Opcode 0x75 - JNE/JNZ rel8: jump short if the zero flag (ZF) is clear. */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC("jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();       /* ZF set: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* ZF clear: take branch */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
6575
6576
/** Opcode 0x76 - JBE/JNA rel8: jump short if CF or ZF is set (unsigned below-or-equal). */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC("jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
6594
6595
/** Opcode 0x77 - JNBE/JA rel8: jump short if both CF and ZF are clear (unsigned above). */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC("jnbe/ja Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();       /* CF or ZF set: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* both clear: take branch */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
6613
6614
/** Opcode 0x78 - JS rel8: jump short if the sign flag (SF) is set. */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC("js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
6632
6633
/** Opcode 0x79 - JNS rel8: jump short if the sign flag (SF) is clear. */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC("jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();       /* SF set: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* SF clear: take branch */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
6651
6652
/** Opcode 0x7a - JP/JPE rel8: jump short if the parity flag (PF) is set. */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC("jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
6670
6671
/** Opcode 0x7b - JNP/JPO rel8: jump short if the parity flag (PF) is clear. */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC("jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();       /* PF set: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* PF clear: take branch */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
6689
6690
/** Opcode 0x7c - JL/JNGE rel8: jump short if SF != OF (signed less). */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC("jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
6708
6709
/** Opcode 0x7d - JNL/JGE rel8: jump short if SF == OF (signed greater-or-equal). */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC("jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();       /* SF != OF: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* SF == OF: take branch */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
6727
6728
/** Opcode 0x7e - JLE/JNG rel8: jump short if ZF is set or SF != OF (signed less-or-equal). */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC("jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
6746
6747
/** Opcode 0x7f - JNLE/JG rel8: jump short if ZF is clear and SF == OF (signed greater). */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC("jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();       /* ZF set or SF != OF: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* ZF clear and SF == OF: take branch */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
6765
6766
/**
 * Opcode 0x80 - Group 1 Eb,Ib (ADD/OR/ADC/SBB/AND/SUB/XOR/CMP with imm8).
 * The ModR/M reg field selects the operation; the actual worker is looked
 * up in g_apIemImplGrp1.  LOCK is only legal for the read-modify-write
 * members (CMP has no pfnLockedU8, so it rejects LOCK and maps read-only).
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Packed mnemonic table: each name occupies 4 bytes, indexed by the reg field. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Eb,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();     /* LOCK requires a memory destination */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP - reads the destination only, and cannot be locked. */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        /* The immediate follows the ModR/M displacement, so fetch it after
           the effective address calculation. */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6825
6826
/**
 * Opcode 0x81 - Group 1 Ev,Iz (ADD/OR/ADC/SBB/AND/SUB/XOR/CMP with imm16/32).
 * In 64-bit operand size the immediate is a 32-bit value sign-extended to
 * 64 bits.  LOCK is only legal for the read-modify-write members with a
 * memory destination.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Packed mnemonic table: each name occupies 4 bytes, indexed by the reg field. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Iz");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP, TEST - read-only, cannot be locked. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                /* Immediate follows the displacement bytes. */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP, TEST - read-only, cannot be locked. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended to 64 bits */
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP - read-only, cannot be locked. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended to 64 bits */
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
7000
7001
7002/** Opcode 0x82. */
7003 FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
7004{
7005 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
7006 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
7007}
7008
7009
/**
 * Opcode 0x83 - Group 1 Ev,Ib (ADD/OR/ADC/SBB/AND/SUB/XOR/CMP with imm8).
 * The 8-bit immediate is sign-extended to the effective operand size.
 * LOCK is only legal for the read-modify-write members with a memory
 * destination.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Packed mnemonic table: each name occupies 4 bytes, indexed by the reg field. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_NO_LOCK_PREFIX();
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16) /* one flavour per table entry; U16 stands in for all sizes */
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP - read-only, cannot be locked. */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                /* Immediate follows the displacement bytes. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
7169
7170
/**
 * Opcode 0x84 - TEST Eb,Gb.
 * Byte AND-without-store; reuses the generic rm,r8 binary-op worker with
 * the TEST implementation table.  AF is left undefined by the hardware.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC("test Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
7179
7180
/**
 * Opcode 0x85 - TEST Ev,Gv.
 * Word/dword/qword AND-without-store; reuses the generic rm,rv binary-op
 * worker with the TEST implementation table.  AF is left undefined.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC("test Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
7189
7190
/**
 * Opcode 0x86 - XCHG Eb,Gb.
 * Byte exchange.  Register-register is done with two fetches and two
 * stores; register-memory is done via an atomic assembly helper on a
 * mapped byte.  Note: no lock-prefix check on the memory path — XCHG with
 * memory is implicitly locked (presumably intentional; confirm against
 * the lock-prefix handling elsewhere).
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        /* Swap the two byte registers via locals. */
        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7238
7239
/** Opcode 0x87. */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    /* XCHG r/m16/32/64,r16/32/64: same pattern as the byte form (0x86),
       replicated per effective operand size. */
    IEMOP_MNEMONIC("xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        /* Register form: fetch both operands into temporaries before storing,
           so reg == r/m still swaps correctly. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* Memory form: map the destination read-write and let the assembly
           worker swap it with the register in place. */
        switch (pIemCpu->enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7360
7361
/** Opcode 0x88. */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    /* MOV r/m8,r8: copy the reg-field byte register to the r/m destination
       (register or memory). */
    IEMOP_MNEMONIC("mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
7400
7401
/** Opcode 0x89. */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    /* MOV r/m16/32/64,r16/32/64: copy the reg-field register to the r/m
       destination; width selected by the effective operand size. */
    IEMOP_MNEMONIC("mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
7488
7489
/** Opcode 0x8a. */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    /* MOV r8,r/m8: load the reg-field byte register from the r/m source
       (register or memory). */
    IEMOP_MNEMONIC("mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7526
7527
/** Opcode 0x8b. */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    /* MOV r16/32/64,r/m16/32/64: load the reg-field register from the r/m
       source; width selected by the effective operand size. */
    IEMOP_MNEMONIC("mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
7614
7615
/** Opcode 0x8c. */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    /* MOV r/m,Sreg: store a segment selector to a GPR (operand-size wide,
       zero extended) or to a word-sized memory location. */
    IEMOP_MNEMONIC("mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t,  u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7688
7689
7690
7691
7692/** Opcode 0x8d. */
7693FNIEMOP_DEF(iemOp_lea_Gv_M)
7694{
7695 IEMOP_MNEMONIC("lea Gv,M");
7696 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7697 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
7698 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7699 return IEMOP_RAISE_INVALID_LOCK_PREFIX(); /* no register form */
7700
7701 switch (pIemCpu->enmEffOpSize)
7702 {
7703 case IEMMODE_16BIT:
7704 IEM_MC_BEGIN(0, 2);
7705 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7706 IEM_MC_LOCAL(uint16_t, u16Cast);
7707 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
7708 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
7709 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Cast);
7710 IEM_MC_ADVANCE_RIP();
7711 IEM_MC_END();
7712 return VINF_SUCCESS;
7713
7714 case IEMMODE_32BIT:
7715 IEM_MC_BEGIN(0, 2);
7716 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7717 IEM_MC_LOCAL(uint32_t, u32Cast);
7718 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
7719 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
7720 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Cast);
7721 IEM_MC_ADVANCE_RIP();
7722 IEM_MC_END();
7723 return VINF_SUCCESS;
7724
7725 case IEMMODE_64BIT:
7726 IEM_MC_BEGIN(0, 1);
7727 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7728 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
7729 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, GCPtrEffSrc);
7730 IEM_MC_ADVANCE_RIP();
7731 IEM_MC_END();
7732 return VINF_SUCCESS;
7733 }
7734 AssertFailedReturn(VERR_INTERNAL_ERROR_5);
7735}
7736
7737
/** Opcode 0x8e. */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    /* MOV Sreg,r/m16: load a segment register from a GPR or a word in
       memory.  The actual descriptor loading / permission checks are done
       by the iemCImpl_load_SReg C implementation. */
    IEMOP_MNEMONIC("mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS /* loading CS this way is invalid */
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7791
7792
7793/** Opcode 0x8f. */
7794FNIEMOP_DEF(iemOp_pop_Ev)
7795{
7796 /* This bugger is rather annoying as it requires rSP to be updated before
7797 doing the effective address calculations. Will eventually require a
7798 split between the R/M+SIB decoding and the effective address
7799 calculation - which is something that is required for any attempt at
7800 reusing this code for a recompiler. It may also be good to have if we
7801 need to delay #UD exception caused by invalid lock prefixes.
7802
7803 For now, we'll do a mostly safe interpreter-only implementation here. */
7804 /** @todo What's the deal with the 'reg' field and pop Ev? Ignorning it for
7805 * now until tests show it's checked.. */
7806 IEMOP_MNEMONIC("pop Ev");
7807 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7808 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
7809
7810 /* Register access is relatively easy and can share code. */
7811 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7812 return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
7813
7814 /*
7815 * Memory target.
7816 *
7817 * Intel says that RSP is incremented before it's used in any effective
7818 * address calcuations. This means some serious extra annoyance here since
7819 * we decode and caclulate the effective address in one step and like to
7820 * delay committing registers till everything is done.
7821 *
7822 * So, we'll decode and calculate the effective address twice. This will
7823 * require some recoding if turned into a recompiler.
7824 */
7825 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */
7826
7827#ifndef TST_IEM_CHECK_MC
7828 /* Calc effective address with modified ESP. */
7829 uint8_t const offOpcodeSaved = pIemCpu->offOpcode;
7830 RTGCPTR GCPtrEff;
7831 VBOXSTRICTRC rcStrict;
7832 rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, &GCPtrEff);
7833 if (rcStrict != VINF_SUCCESS)
7834 return rcStrict;
7835 pIemCpu->offOpcode = offOpcodeSaved;
7836
7837 PCPUMCTX pCtx = pIemCpu->CTX_SUFF(pCtx);
7838 uint64_t const RspSaved = pCtx->rsp;
7839 switch (pIemCpu->enmEffOpSize)
7840 {
7841 case IEMMODE_16BIT: iemRegAddToRsp(pCtx, 2); break;
7842 case IEMMODE_32BIT: iemRegAddToRsp(pCtx, 4); break;
7843 case IEMMODE_64BIT: iemRegAddToRsp(pCtx, 8); break;
7844 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7845 }
7846 rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, &GCPtrEff);
7847 Assert(rcStrict == VINF_SUCCESS);
7848 pCtx->rsp = RspSaved;
7849
7850 /* Perform the operation - this should be CImpl. */
7851 RTUINT64U TmpRsp;
7852 TmpRsp.u = pCtx->rsp;
7853 switch (pIemCpu->enmEffOpSize)
7854 {
7855 case IEMMODE_16BIT:
7856 {
7857 uint16_t u16Value;
7858 rcStrict = iemMemStackPopU16Ex(pIemCpu, &u16Value, &TmpRsp);
7859 if (rcStrict == VINF_SUCCESS)
7860 rcStrict = iemMemStoreDataU16(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u16Value);
7861 break;
7862 }
7863
7864 case IEMMODE_32BIT:
7865 {
7866 uint32_t u32Value;
7867 rcStrict = iemMemStackPopU32Ex(pIemCpu, &u32Value, &TmpRsp);
7868 if (rcStrict == VINF_SUCCESS)
7869 rcStrict = iemMemStoreDataU32(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u32Value);
7870 break;
7871 }
7872
7873 case IEMMODE_64BIT:
7874 {
7875 uint64_t u64Value;
7876 rcStrict = iemMemStackPopU64Ex(pIemCpu, &u64Value, &TmpRsp);
7877 if (rcStrict == VINF_SUCCESS)
7878 rcStrict = iemMemStoreDataU16(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u64Value);
7879 break;
7880 }
7881
7882 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7883 }
7884 if (rcStrict == VINF_SUCCESS)
7885 {
7886 pCtx->rsp = TmpRsp.u;
7887 iemRegUpdateRip(pIemCpu);
7888 }
7889 return rcStrict;
7890
7891#else
7892 return VERR_IEM_IPE_2;
7893#endif
7894}
7895
7896
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Swaps rAX with the register given by @a iReg (after OR-ing in REX.B),
 * using the current effective operand size.  Shared by opcodes 0x90..0x97.
 *
 * @param   iReg    The low 3 bits of the register index (from the opcode).
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    iReg |= pIemCpu->uRexB; /* REX.B extends the register index to r8..r15. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7946
7947
7948/** Opcode 0x90. */
7949FNIEMOP_DEF(iemOp_nop)
7950{
7951 /* R8/R8D and RAX/EAX can be exchanged. */
7952 if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_B)
7953 {
7954 IEMOP_MNEMONIC("xchg r8,rAX");
7955 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
7956 }
7957
7958 if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
7959 IEMOP_MNEMONIC("pause");
7960 else
7961 IEMOP_MNEMONIC("nop");
7962 IEM_MC_BEGIN(0, 0);
7963 IEM_MC_ADVANCE_RIP();
7964 IEM_MC_END();
7965 return VINF_SUCCESS;
7966}
7967
7968
/** Opcode 0x91.
 * XCHG rCX,rAX - the common helper handles operand size and REX.B. */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC("xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
7975
7976
/** Opcode 0x92.
 * XCHG rDX,rAX - the common helper handles operand size and REX.B. */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC("xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
7983
7984
/** Opcode 0x93.
 * XCHG rBX,rAX - the common helper handles operand size and REX.B. */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC("xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
7991
7992
7993/** Opcode 0x94. */
7994FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
7995{
7996 IEMOP_MNEMONIC("xchg rSX,rAX");
7997 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
7998}
7999
8000
/** Opcode 0x95.
 * XCHG rBP,rAX - the common helper handles operand size and REX.B. */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC("xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
8007
8008
/** Opcode 0x96.
 * XCHG rSI,rAX - the common helper handles operand size and REX.B. */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC("xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
8015
8016
/** Opcode 0x97.
 * XCHG rDI,rAX - the common helper handles operand size and REX.B. */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC("xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
8023
8024
/** Opcode 0x98.
 * CBW/CWDE/CDQE: sign-extend AL->AX, AX->EAX or EAX->RAX depending on the
 * effective operand size.  Implemented by testing the sign bit of the source
 * and then OR-ing in (negative) or AND-ing away (non-negative) the upper
 * bits of rAX. */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8070
8071
/** Opcode 0x99.
 * CWD/CDQ/CQO: sign-extend rAX into rDX:rAX by filling rDX with all ones or
 * all zeros depending on the sign bit of the source, per the effective
 * operand size. */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8117
8118
/** Opcode 0x9a.
 * CALL Ap (far call with immediate selector:offset).  Invalid in 64-bit
 * mode.  The offset immediate is 16 or 32 bits wide depending on the
 * effective operand size; the heavy lifting is deferred to iemCImpl_callf. */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC("call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pIemCpu->enmEffOpSize);
}
8135
8136
/** Opcode 0x9b. (aka fwait)
 * WAIT/FWAIT: no operation beyond checking for device-not-available and
 * pending FPU exceptions via the two MAYBE_RAISE macros. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC("wait");
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8150
8151
8152/** Opcode 0x9c. */
8153FNIEMOP_DEF(iemOp_pushf_Fv)
8154{
8155 IEMOP_HLP_NO_LOCK_PREFIX();
8156 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8157 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pIemCpu->enmEffOpSize);
8158}
8159
8160
8161/** Opcode 0x9d. */
8162FNIEMOP_DEF(iemOp_popf_Fv)
8163{
8164 IEMOP_HLP_NO_LOCK_PREFIX();
8165 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8166 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pIemCpu->enmEffOpSize);
8167}
8168
8169
/** Opcode 0x9e.
 * SAHF: store AH into the low byte of EFLAGS (SF, ZF, AF, PF, CF; bit 1
 * forced to 1).  In 64-bit mode this requires the LAHF/SAHF CPUID bit. */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC("sahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_IS_AMD_CPUID_FEATURE_PRESENT_ECX(X86_CPUID_AMD_FEATURE_ECX_LAHF_SAHF))
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    /* AH is general register index 4 (xSP) when no REX prefix selects SPL. */
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1); /* bit 1 of EFLAGS is always set */
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8192
8193
/** Opcode 0x9f.
 * LAHF: load the low byte of EFLAGS into AH.  In 64-bit mode this requires
 * the LAHF/SAHF CPUID bit. */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC("lahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_IS_AMD_CPUID_FEATURE_PRESENT_ECX(X86_CPUID_AMD_FEATURE_ECX_LAHF_SAHF))
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    /* AH is general register index 4 (xSP) when no REX prefix selects SPL. */
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8210
8211
/**
 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
 * prefixes.  Will return on failures.
 *
 * The moffs immediate is 16, 32 or 64 bits wide depending on the effective
 * address size; narrower values are zero extended to 64 bits.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pIemCpu->enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_NO_LOCK_PREFIX(); \
    } while (0)
8236
8237/** Opcode 0xa0. */
8238FNIEMOP_DEF(iemOp_mov_Al_Ob)
8239{
8240 /*
8241 * Get the offset and fend of lock prefixes.
8242 */
8243 RTGCPTR GCPtrMemOff;
8244 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
8245
8246 /*
8247 * Fetch AL.
8248 */
8249 IEM_MC_BEGIN(0,1);
8250 IEM_MC_LOCAL(uint8_t, u8Tmp);
8251 IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
8252 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
8253 IEM_MC_ADVANCE_RIP();
8254 IEM_MC_END();
8255 return VINF_SUCCESS;
8256}
8257
8258
/** Opcode 0xa1 - MOV rAX, moffs16/32/64.
 * Loads AX/EAX/RAX from seg:moffs; width follows the effective operand
 * size, the moffs width follows the effective address size. */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC("mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8304
8305
8306/** Opcode 0xa2. */
8307FNIEMOP_DEF(iemOp_mov_Ob_AL)
8308{
8309 /*
8310 * Get the offset and fend of lock prefixes.
8311 */
8312 RTGCPTR GCPtrMemOff;
8313 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
8314
8315 /*
8316 * Store AL.
8317 */
8318 IEM_MC_BEGIN(0,1);
8319 IEM_MC_LOCAL(uint8_t, u8Tmp);
8320 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
8321 IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrMemOff, u8Tmp);
8322 IEM_MC_ADVANCE_RIP();
8323 IEM_MC_END();
8324 return VINF_SUCCESS;
8325}
8326
8327
8328/** Opcode 0xa3. */
8329FNIEMOP_DEF(iemOp_mov_Ov_rAX)
8330{
8331 /*
8332 * Get the offset and fend of lock prefixes.
8333 */
8334 RTGCPTR GCPtrMemOff;
8335 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
8336
8337 /*
8338 * Store rAX.
8339 */
8340 switch (pIemCpu->enmEffOpSize)
8341 {
8342 case IEMMODE_16BIT:
8343 IEM_MC_BEGIN(0,1);
8344 IEM_MC_LOCAL(uint16_t, u16Tmp);
8345 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
8346 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrMemOff, u16Tmp);
8347 IEM_MC_ADVANCE_RIP();
8348 IEM_MC_END();
8349 return VINF_SUCCESS;
8350
8351 case IEMMODE_32BIT:
8352 IEM_MC_BEGIN(0,1);
8353 IEM_MC_LOCAL(uint32_t, u32Tmp);
8354 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
8355 IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrMemOff, u32Tmp);
8356 IEM_MC_ADVANCE_RIP();
8357 IEM_MC_END();
8358 return VINF_SUCCESS;
8359
8360 case IEMMODE_64BIT:
8361 IEM_MC_BEGIN(0,1);
8362 IEM_MC_LOCAL(uint64_t, u64Tmp);
8363 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
8364 IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrMemOff, u64Tmp);
8365 IEM_MC_ADVANCE_RIP();
8366 IEM_MC_END();
8367 return VINF_SUCCESS;
8368
8369 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8370 }
8371}
8372
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Emits the non-repeated MOVS body for one (value width, address width)
 * combination: load from DS(or override):rSI, store to ES:rDI, then
 * advance or retreat both index registers by the element size per EFLAGS.DF.
 * Note that the ES segment for the destination cannot be overridden. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
8391
/** Opcode 0xa4 - MOVSB.
 * With a REP prefix the whole string move is deferred to the C
 * implementation; otherwise one byte is moved via IEM_MOVS_CASE. */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movsb Xb,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
8425
8426
/** Opcode 0xa5 - MOVSW/MOVSD/MOVSQ.
 * REP-prefixed forms are deferred to the C implementation, selected by
 * (operand size, address size); the non-repeated forms use IEM_MOVS_CASE.
 * The missing breaks after the fully-returning nested switches are
 * unreachable, so the fall-throughs are harmless. */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movs Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every case above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3); /* 16-bit addressing not encodable with 64-bit operand size */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
8509
8510#undef IEM_MOVS_CASE
8511
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 * Emits the non-repeated CMPS body for one (value width, address width)
 * combination: load from DS(or override):rSI and ES:rDI, compare via the
 * cmp assembly worker (only EFLAGS change; uValue1 is a by-ref scratch),
 * then advance or retreat both index registers per EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
8538
8539/** Opcode 0xa6. */
8540FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
8541{
8542 IEMOP_HLP_NO_LOCK_PREFIX();
8543
8544 /*
8545 * Use the C implementation if a repeat prefix is encountered.
8546 */
8547 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
8548 {
8549 IEMOP_MNEMONIC("repe cmps Xb,Yb");
8550 switch (pIemCpu->enmEffAddrMode)
8551 {
8552 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pIemCpu->iEffSeg);
8553 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pIemCpu->iEffSeg);
8554 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pIemCpu->iEffSeg);
8555 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8556 }
8557 }
8558 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
8559 {
8560 IEMOP_MNEMONIC("repe cmps Xb,Yb");
8561 switch (pIemCpu->enmEffAddrMode)
8562 {
8563 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pIemCpu->iEffSeg);
8564 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pIemCpu->iEffSeg);
8565 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pIemCpu->iEffSeg);
8566 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8567 }
8568 }
8569 IEMOP_MNEMONIC("cmps Xb,Yb");
8570
8571 /*
8572 * Sharing case implementation with cmps[wdq] below.
8573 */
8574 switch (pIemCpu->enmEffAddrMode)
8575 {
8576 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
8577 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
8578 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
8579 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8580 }
8581 return VINF_SUCCESS;
8582
8583}
8584
8585
/** Opcode 0xa7 - CMPSW/CMPSD/CMPSQ.
 * REPE/REPNE forms are deferred to the C implementation, selected by
 * (operand size, address size); non-repeated forms use IEM_CMPS_CASE.
 * The missing breaks after fully-returning nested switches are
 * unreachable and therefore harmless. */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every case above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3); /* not encodable */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every case above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3); /* not encodable */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC("cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
8704
8705#undef IEM_CMPS_CASE
8706
/** Opcode 0xa8 - TEST AL, imm8.
 * Delegates to the common AL,Ib binary-operator helper with the TEST
 * worker table; AF is left undefined per the architecture. */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC("test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
8714
8715
/** Opcode 0xa9 - TEST rAX, imm16/32.
 * Delegates to the common rAX,Iz binary-operator helper with the TEST
 * worker table; AF is left undefined per the architecture. */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC("test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
8723
8724
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 * Emits the non-repeated STOS body for one (value width, address width)
 * combination: store rAX's low ValBits to ES:rDI (ES cannot be
 * overridden), then advance or retreat rDI per EFLAGS.DF. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
8740
/** Opcode 0xaa - STOSB.
 * With a REP prefix the whole string store is deferred to the C
 * implementation (no segment argument: STOS always targets ES:rDI);
 * otherwise one byte is stored via IEM_STOS_CASE. */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yb,al");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
8774
8775
/** Opcode 0xab - STOSW/STOSD/STOSQ.
 * REP-prefixed forms are deferred to the C implementation, selected by
 * (operand size, address size); non-repeated forms use IEM_STOS_CASE.
 * The missing breaks after fully-returning nested switches are
 * unreachable and therefore harmless. */
FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yv,rAX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every case above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3); /* not encodable */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yv,rAX");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with stosb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
8858
8859#undef IEM_STOS_CASE
8860
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 * Emits the non-repeated LODS body for one (value width, address width)
 * combination: load ValBits from DS(or override):rSI into rAX's low
 * ValBits, then advance or retreat rSI per EFLAGS.DF. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
8876
/** Opcode 0xac - LODSB.
 * With a REP prefix the (rarely useful) repeated load is deferred to the
 * C implementation; otherwise one byte is loaded via IEM_LODS_CASE. */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lodsb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lodsb al,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
8910
8911
/** Opcode 0xad - LODSW/LODSD/LODSQ.
 * REP-prefixed forms are deferred to the C implementation, selected by
 * (operand size, address size); non-repeated forms use IEM_LODS_CASE.
 * The missing breaks after fully-returning nested switches are
 * unreachable and therefore harmless. */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lods rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every case above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3); /* not encodable */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
8994
8995#undef IEM_LODS_CASE
8996
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 * Emits the non-repeated SCAS body for one (value width, address width)
 * combination: compare rAX's low ValBits against ES:rDI (ES cannot be
 * overridden) via the cmp worker -- only EFLAGS change, rAX is passed by
 * reference as the left operand -- then step rDI per EFLAGS.DF. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
9018
/** Opcode 0xae - SCASB.
 * REPE/REPNE forms are deferred to the matching C implementations (no
 * segment argument: SCAS always reads ES:rDI); the non-repeated form
 * uses IEM_SCAS_CASE. */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scasb al,Xb");

    /*
     * Sharing case implementation with scas[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
9063
9064
9065/** Opcode 0xaf. */
9066FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
9067{
9068 IEMOP_HLP_NO_LOCK_PREFIX();
9069
9070 /*
9071 * Use the C implementation if a repeat prefix is encountered.
9072 */
9073 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
9074 {
9075 IEMOP_MNEMONIC("repe scas rAX,Xv");
9076 switch (pIemCpu->enmEffOpSize)
9077 {
9078 case IEMMODE_16BIT:
9079 switch (pIemCpu->enmEffAddrMode)
9080 {
9081 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
9082 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
9083 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
9084 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9085 }
9086 break;
9087 case IEMMODE_32BIT:
9088 switch (pIemCpu->enmEffAddrMode)
9089 {
9090 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
9091 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
9092 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
9093 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9094 }
9095 case IEMMODE_64BIT:
9096 switch (pIemCpu->enmEffAddrMode)
9097 {
9098 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
9099 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
9100 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
9101 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9102 }
9103 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9104 }
9105 }
9106 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
9107 {
9108 IEMOP_MNEMONIC("repne scas rAX,Xv");
9109 switch (pIemCpu->enmEffOpSize)
9110 {
9111 case IEMMODE_16BIT:
9112 switch (pIemCpu->enmEffAddrMode)
9113 {
9114 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
9115 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
9116 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
9117 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9118 }
9119 break;
9120 case IEMMODE_32BIT:
9121 switch (pIemCpu->enmEffAddrMode)
9122 {
9123 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
9124 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
9125 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
9126 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9127 }
9128 case IEMMODE_64BIT:
9129 switch (pIemCpu->enmEffAddrMode)
9130 {
9131 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
9132 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
9133 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
9134 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9135 }
9136 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9137 }
9138 }
9139 IEMOP_MNEMONIC("scas rAX,Xv");
9140
9141 /*
9142 * Annoying double switch here.
9143 * Using ugly macro for implementing the cases, sharing it with scasb.
9144 */
9145 switch (pIemCpu->enmEffOpSize)
9146 {
9147 case IEMMODE_16BIT:
9148 switch (pIemCpu->enmEffAddrMode)
9149 {
9150 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
9151 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
9152 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
9153 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9154 }
9155 break;
9156
9157 case IEMMODE_32BIT:
9158 switch (pIemCpu->enmEffAddrMode)
9159 {
9160 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
9161 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
9162 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
9163 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9164 }
9165 break;
9166
9167 case IEMMODE_64BIT:
9168 switch (pIemCpu->enmEffAddrMode)
9169 {
9170 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
9171 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
9172 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
9173 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9174 }
9175 break;
9176 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9177 }
9178 return VINF_SUCCESS;
9179}
9180
9181#undef IEM_SCAS_CASE
9182
/**
 * Common 'mov r8, imm8' helper (opcodes 0xb0..0xb7).
 *
 * Fetches the imm8 operand and stores it into the given byte register.
 *
 * @param   iReg    The register index (X86_GREG_XXX).  Indices 4..7 denote
 *                  AH/CH/DH/BH when no REX prefix is present — presumably
 *                  resolved inside IEM_MC_STORE_GREG_U8 (NOTE(review): confirm).
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /* lock prefix is invalid with mov reg,imm */

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iReg, u8Value);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
9199
9200
/** Opcode 0xb0 - mov AL,Ib. */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC("mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX);
}
9207
9208
/** Opcode 0xb1 - mov CL,Ib. */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC("mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX);
}
9215
9216
/** Opcode 0xb2 - mov DL,Ib. */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC("mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX);
}
9223
9224
/** Opcode 0xb3 - mov BL,Ib. */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC("mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX);
}
9231
9232
/** Opcode 0xb4 - mov AH,Ib (or SPL,Ib with REX).
 * Register index 4 (xSP) encodes AH in the legacy byte-register set. */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC("mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP);
}
9239
9240
/** Opcode 0xb5 - mov CH,Ib (or BPL,Ib with REX).
 * Register index 5 (xBP) encodes CH in the legacy byte-register set. */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC("mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP);
}
9247
9248
/** Opcode 0xb6 - mov DH,Ib (or SIL,Ib with REX).
 * Register index 6 (xSI) encodes DH in the legacy byte-register set. */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC("mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI);
}
9255
9256
/** Opcode 0xb7 - mov BH,Ib (or DIL,Ib with REX).
 * Register index 7 (xDI) encodes BH in the legacy byte-register set. */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC("mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI);
}
9263
9264
9265/**
9266 * Common 'mov regX,immX' helper.
9267 */
9268FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
9269{
9270 switch (pIemCpu->enmEffOpSize)
9271 {
9272 case IEMMODE_16BIT:
9273 {
9274 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9275 IEMOP_HLP_NO_LOCK_PREFIX();
9276
9277 IEM_MC_BEGIN(0, 1);
9278 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
9279 IEM_MC_STORE_GREG_U16(iReg, u16Value);
9280 IEM_MC_ADVANCE_RIP();
9281 IEM_MC_END();
9282 break;
9283 }
9284
9285 case IEMMODE_32BIT:
9286 {
9287 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9288 IEMOP_HLP_NO_LOCK_PREFIX();
9289
9290 IEM_MC_BEGIN(0, 1);
9291 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
9292 IEM_MC_STORE_GREG_U32(iReg, u32Value);
9293 IEM_MC_ADVANCE_RIP();
9294 IEM_MC_END();
9295 break;
9296 }
9297 case IEMMODE_64BIT:
9298 {
9299 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm);
9300 IEMOP_HLP_NO_LOCK_PREFIX();
9301
9302 IEM_MC_BEGIN(0, 1);
9303 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
9304 IEM_MC_STORE_GREG_U64(iReg, u64Value);
9305 IEM_MC_ADVANCE_RIP();
9306 IEM_MC_END();
9307 break;
9308 }
9309 }
9310
9311 return VINF_SUCCESS;
9312}
9313
9314
/** Opcode 0xb8 - mov rAX,Iv. */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC("mov rAX,IV"); /* NOTE(review): "IV" casing differs from the usual "Iv" mnemonic style used elsewhere. */
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX);
}
9321
9322
/** Opcode 0xb9 - mov rCX,Iv. */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC("mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX);
}
9329
9330
/** Opcode 0xba - mov rDX,Iv. */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC("mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX);
}
9337
9338
/** Opcode 0xbb - mov rBX,Iv. */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC("mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX);
}
9345
9346
/** Opcode 0xbc - mov rSP,Iv. */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC("mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP);
}
9353
9354
/** Opcode 0xbd - mov rBP,Iv. */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC("mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP);
}
9361
9362
/** Opcode 0xbe - mov rSI,Iv. */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC("mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI);
}
9369
9370
/** Opcode 0xbf - mov rDI,Iv. */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC("mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI);
}
9377
9378
9379/** Opcode 0xc0. */
9380FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
9381{
9382 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9383 PCIEMOPSHIFTSIZES pImpl;
9384 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9385 {
9386 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,Ib"); break;
9387 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,Ib"); break;
9388 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,Ib"); break;
9389 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,Ib"); break;
9390 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,Ib"); break;
9391 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,Ib"); break;
9392 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,Ib"); break;
9393 case 6: return IEMOP_RAISE_INVALID_LOCK_PREFIX();
9394 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
9395 }
9396 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
9397
9398 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9399 {
9400 /* register */
9401 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9402 IEMOP_HLP_NO_LOCK_PREFIX();
9403 IEM_MC_BEGIN(3, 0);
9404 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9405 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
9406 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9407 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9408 IEM_MC_REF_EFLAGS(pEFlags);
9409 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
9410 IEM_MC_ADVANCE_RIP();
9411 IEM_MC_END();
9412 }
9413 else
9414 {
9415 /* memory */
9416 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
9417 IEM_MC_BEGIN(3, 2);
9418 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9419 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9420 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9421 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9422
9423 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
9424 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9425 IEM_MC_ASSIGN(cShiftArg, cShift);
9426 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9427 IEM_MC_FETCH_EFLAGS(EFlags);
9428 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
9429
9430 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
9431 IEM_MC_COMMIT_EFLAGS(EFlags);
9432 IEM_MC_ADVANCE_RIP();
9433 IEM_MC_END();
9434 }
9435 return VINF_SUCCESS;
9436}
9437
9438
9439/** Opcode 0xc1. */
9440FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
9441{
9442 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9443 PCIEMOPSHIFTSIZES pImpl;
9444 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9445 {
9446 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,Ib"); break;
9447 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,Ib"); break;
9448 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,Ib"); break;
9449 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,Ib"); break;
9450 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,Ib"); break;
9451 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,Ib"); break;
9452 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,Ib"); break;
9453 case 6: return IEMOP_RAISE_INVALID_LOCK_PREFIX();
9454 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
9455 }
9456 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
9457
9458 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9459 {
9460 /* register */
9461 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9462 IEMOP_HLP_NO_LOCK_PREFIX();
9463 switch (pIemCpu->enmEffOpSize)
9464 {
9465 case IEMMODE_16BIT:
9466 IEM_MC_BEGIN(3, 0);
9467 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9468 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
9469 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9470 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9471 IEM_MC_REF_EFLAGS(pEFlags);
9472 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
9473 IEM_MC_ADVANCE_RIP();
9474 IEM_MC_END();
9475 return VINF_SUCCESS;
9476
9477 case IEMMODE_32BIT:
9478 IEM_MC_BEGIN(3, 0);
9479 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9480 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
9481 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9482 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9483 IEM_MC_REF_EFLAGS(pEFlags);
9484 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
9485 IEM_MC_ADVANCE_RIP();
9486 IEM_MC_END();
9487 return VINF_SUCCESS;
9488
9489 case IEMMODE_64BIT:
9490 IEM_MC_BEGIN(3, 0);
9491 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9492 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
9493 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9494 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9495 IEM_MC_REF_EFLAGS(pEFlags);
9496 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
9497 IEM_MC_ADVANCE_RIP();
9498 IEM_MC_END();
9499 return VINF_SUCCESS;
9500
9501 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9502 }
9503 }
9504 else
9505 {
9506 /* memory */
9507 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
9508 switch (pIemCpu->enmEffOpSize)
9509 {
9510 case IEMMODE_16BIT:
9511 IEM_MC_BEGIN(3, 2);
9512 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9513 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9514 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9515 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9516
9517 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
9518 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9519 IEM_MC_ASSIGN(cShiftArg, cShift);
9520 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9521 IEM_MC_FETCH_EFLAGS(EFlags);
9522 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
9523
9524 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9525 IEM_MC_COMMIT_EFLAGS(EFlags);
9526 IEM_MC_ADVANCE_RIP();
9527 IEM_MC_END();
9528 return VINF_SUCCESS;
9529
9530 case IEMMODE_32BIT:
9531 IEM_MC_BEGIN(3, 2);
9532 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9533 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9534 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9535 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9536
9537 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
9538 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9539 IEM_MC_ASSIGN(cShiftArg, cShift);
9540 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9541 IEM_MC_FETCH_EFLAGS(EFlags);
9542 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
9543
9544 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9545 IEM_MC_COMMIT_EFLAGS(EFlags);
9546 IEM_MC_ADVANCE_RIP();
9547 IEM_MC_END();
9548 return VINF_SUCCESS;
9549
9550 case IEMMODE_64BIT:
9551 IEM_MC_BEGIN(3, 2);
9552 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9553 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9554 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9555 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9556
9557 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
9558 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9559 IEM_MC_ASSIGN(cShiftArg, cShift);
9560 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9561 IEM_MC_FETCH_EFLAGS(EFlags);
9562 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
9563
9564 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9565 IEM_MC_COMMIT_EFLAGS(EFlags);
9566 IEM_MC_ADVANCE_RIP();
9567 IEM_MC_END();
9568 return VINF_SUCCESS;
9569
9570 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9571 }
9572 }
9573}
9574
9575
/** Opcode 0xc2 - retn Iw (near return, popping Iw extra bytes). */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC("retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); /* bytes to add to rSP after popping the return address */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near ret defaults to 64-bit operand size in long mode */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, u16Imm);
}
9585
9586
/** Opcode 0xc3 - retn (plain near return). */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC("retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near ret defaults to 64-bit operand size in long mode */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, 0); /* 0 = no extra bytes to pop */
}
9595
9596
/** Opcode 0xc4 - les Gv,Mp (load far pointer into ES:Gv).
 * NOTE(review): 0xc4 doubles as the VEX3 prefix on newer CPUs; presumably the
 * mod=3 case is rejected inside the common worker — confirm. */
FNIEMOP_DEF(iemOp_les_Gv_Mp)
{
    IEMOP_MNEMONIC("les Gv,Mp");
    return FNIEMOP_CALL_1(iemOpCommonLoadSRegAndGreg, X86_SREG_ES);
}
9603
9604
/** Opcode 0xc5 - lds Gv,Mp (load far pointer into DS:Gv).
 * NOTE(review): 0xc5 doubles as the VEX2 prefix on newer CPUs; presumably the
 * mod=3 case is rejected inside the common worker — confirm. */
FNIEMOP_DEF(iemOp_lds_Gv_Mp)
{
    IEMOP_MNEMONIC("lds Gv,Mp");
    return FNIEMOP_CALL_1(iemOpCommonLoadSRegAndGreg, X86_SREG_DS);
}
9611
9612
9613/** Opcode 0xc6. */
9614FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
9615{
9616 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9617 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
9618 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
9619 return IEMOP_RAISE_INVALID_LOCK_PREFIX();
9620 IEMOP_MNEMONIC("mov Eb,Ib");
9621
9622 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9623 {
9624 /* register access */
9625 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9626 IEM_MC_BEGIN(0, 0);
9627 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Imm);
9628 IEM_MC_ADVANCE_RIP();
9629 IEM_MC_END();
9630 }
9631 else
9632 {
9633 /* memory access. */
9634 IEM_MC_BEGIN(0, 1);
9635 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9636 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
9637 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9638 IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Imm);
9639 IEM_MC_ADVANCE_RIP();
9640 IEM_MC_END();
9641 }
9642 return VINF_SUCCESS;
9643}
9644
9645
9646/** Opcode 0xc7. */
9647FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
9648{
9649 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9650 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
9651 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
9652 return IEMOP_RAISE_INVALID_LOCK_PREFIX();
9653 IEMOP_MNEMONIC("mov Ev,Iz");
9654
9655 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9656 {
9657 /* register access */
9658 switch (pIemCpu->enmEffOpSize)
9659 {
9660 case IEMMODE_16BIT:
9661 IEM_MC_BEGIN(0, 0);
9662 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9663 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Imm);
9664 IEM_MC_ADVANCE_RIP();
9665 IEM_MC_END();
9666 return VINF_SUCCESS;
9667
9668 case IEMMODE_32BIT:
9669 IEM_MC_BEGIN(0, 0);
9670 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9671 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Imm);
9672 IEM_MC_ADVANCE_RIP();
9673 IEM_MC_END();
9674 return VINF_SUCCESS;
9675
9676 case IEMMODE_64BIT:
9677 IEM_MC_BEGIN(0, 0);
9678 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm);
9679 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Imm);
9680 IEM_MC_ADVANCE_RIP();
9681 IEM_MC_END();
9682 return VINF_SUCCESS;
9683
9684 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9685 }
9686 }
9687 else
9688 {
9689 /* memory access. */
9690 switch (pIemCpu->enmEffOpSize)
9691 {
9692 case IEMMODE_16BIT:
9693 IEM_MC_BEGIN(0, 1);
9694 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9695 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
9696 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9697 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Imm);
9698 IEM_MC_ADVANCE_RIP();
9699 IEM_MC_END();
9700 return VINF_SUCCESS;
9701
9702 case IEMMODE_32BIT:
9703 IEM_MC_BEGIN(0, 1);
9704 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9705 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
9706 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9707 IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Imm);
9708 IEM_MC_ADVANCE_RIP();
9709 IEM_MC_END();
9710 return VINF_SUCCESS;
9711
9712 case IEMMODE_64BIT:
9713 IEM_MC_BEGIN(0, 1);
9714 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9715 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
9716 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm);
9717 IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Imm);
9718 IEM_MC_ADVANCE_RIP();
9719 IEM_MC_END();
9720 return VINF_SUCCESS;
9721
9722 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9723 }
9724 }
9725}
9726
9727
9728
9729
/** Opcode 0xc8 - enter Iw,Ib. Not implemented yet; FNIEMOP_STUB presumably
 * expands to an "unimplemented instruction" handler — confirm against the
 * macro definition. */
FNIEMOP_STUB(iemOp_enter_Iw_Ib);
9732
9733
9734/** Opcode 0xc9. */
9735FNIEMOP_DEF(iemOp_leave)
9736{
9737 IEMOP_MNEMONIC("retn");
9738 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9739 IEMOP_HLP_NO_LOCK_PREFIX();
9740 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pIemCpu->enmEffOpSize);
9741}
9742
9743
/** Opcode 0xca - retf Iw (far return, popping Iw extra bytes). */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC("retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); /* bytes to add to rSP after popping CS:rIP */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, u16Imm);
}
9753
9754
/** Opcode 0xcb - retf (plain far return). */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC("retf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, 0); /* 0 = no extra bytes to pop */
}
9763
9764
9765/** Opcode 0xcc. */
9766FNIEMOP_DEF(iemOp_int_3)
9767{
9768 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
9769}
9770
9771
9772/** Opcode 0xcd. */
9773FNIEMOP_DEF(iemOp_int_Ib)
9774{
9775 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
9776 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
9777}
9778
9779
9780/** Opcode 0xce. */
9781FNIEMOP_DEF(iemOp_into)
9782{
9783 IEM_MC_BEGIN(2, 0);
9784 IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
9785 IEM_MC_ARG_CONST(bool, fIsBpInstr, /*=*/ false, 1);
9786 IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
9787 IEM_MC_END();
9788 return VINF_SUCCESS;
9789}
9790
9791
/** Opcode 0xcf - iret.
 * No IEMOP_HLP_DEFAULT_64BIT_OP_SIZE here: iret keeps the 32-bit default in
 * long mode (iretq requires REX.W). */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC("iret");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pIemCpu->enmEffOpSize);
}
9799
9800
9801/** Opcode 0xd0. */
9802FNIEMOP_DEF(iemOp_Grp2_Eb_1)
9803{
9804 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9805 PCIEMOPSHIFTSIZES pImpl;
9806 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9807 {
9808 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,1"); break;
9809 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,1"); break;
9810 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,1"); break;
9811 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,1"); break;
9812 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,1"); break;
9813 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,1"); break;
9814 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,1"); break;
9815 case 6: return IEMOP_RAISE_INVALID_LOCK_PREFIX();
9816 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
9817 }
9818 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
9819
9820 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9821 {
9822 /* register */
9823 IEMOP_HLP_NO_LOCK_PREFIX();
9824 IEM_MC_BEGIN(3, 0);
9825 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9826 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
9827 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9828 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9829 IEM_MC_REF_EFLAGS(pEFlags);
9830 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
9831 IEM_MC_ADVANCE_RIP();
9832 IEM_MC_END();
9833 }
9834 else
9835 {
9836 /* memory */
9837 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
9838 IEM_MC_BEGIN(3, 2);
9839 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9840 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
9841 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9842 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9843
9844 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
9845 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9846 IEM_MC_FETCH_EFLAGS(EFlags);
9847 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
9848
9849 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
9850 IEM_MC_COMMIT_EFLAGS(EFlags);
9851 IEM_MC_ADVANCE_RIP();
9852 IEM_MC_END();
9853 }
9854 return VINF_SUCCESS;
9855}
9856
9857
9858
9859/** Opcode 0xd1. */
9860FNIEMOP_DEF(iemOp_Grp2_Ev_1)
9861{
9862 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9863 PCIEMOPSHIFTSIZES pImpl;
9864 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9865 {
9866 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,1"); break;
9867 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,1"); break;
9868 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,1"); break;
9869 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,1"); break;
9870 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,1"); break;
9871 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,1"); break;
9872 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,1"); break;
9873 case 6: return IEMOP_RAISE_INVALID_LOCK_PREFIX();
9874 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
9875 }
9876 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
9877
9878 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9879 {
9880 /* register */
9881 IEMOP_HLP_NO_LOCK_PREFIX();
9882 switch (pIemCpu->enmEffOpSize)
9883 {
9884 case IEMMODE_16BIT:
9885 IEM_MC_BEGIN(3, 0);
9886 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9887 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
9888 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9889 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9890 IEM_MC_REF_EFLAGS(pEFlags);
9891 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
9892 IEM_MC_ADVANCE_RIP();
9893 IEM_MC_END();
9894 return VINF_SUCCESS;
9895
9896 case IEMMODE_32BIT:
9897 IEM_MC_BEGIN(3, 0);
9898 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9899 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
9900 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9901 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9902 IEM_MC_REF_EFLAGS(pEFlags);
9903 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
9904 IEM_MC_ADVANCE_RIP();
9905 IEM_MC_END();
9906 return VINF_SUCCESS;
9907
9908 case IEMMODE_64BIT:
9909 IEM_MC_BEGIN(3, 0);
9910 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9911 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
9912 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9913 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9914 IEM_MC_REF_EFLAGS(pEFlags);
9915 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
9916 IEM_MC_ADVANCE_RIP();
9917 IEM_MC_END();
9918 return VINF_SUCCESS;
9919
9920 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9921 }
9922 }
9923 else
9924 {
9925 /* memory */
9926 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
9927 switch (pIemCpu->enmEffOpSize)
9928 {
9929 case IEMMODE_16BIT:
9930 IEM_MC_BEGIN(3, 2);
9931 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9932 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
9933 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9934 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9935
9936 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
9937 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9938 IEM_MC_FETCH_EFLAGS(EFlags);
9939 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
9940
9941 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9942 IEM_MC_COMMIT_EFLAGS(EFlags);
9943 IEM_MC_ADVANCE_RIP();
9944 IEM_MC_END();
9945 return VINF_SUCCESS;
9946
9947 case IEMMODE_32BIT:
9948 IEM_MC_BEGIN(3, 2);
9949 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9950 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
9951 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9952 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9953
9954 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
9955 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9956 IEM_MC_FETCH_EFLAGS(EFlags);
9957 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
9958
9959 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9960 IEM_MC_COMMIT_EFLAGS(EFlags);
9961 IEM_MC_ADVANCE_RIP();
9962 IEM_MC_END();
9963 return VINF_SUCCESS;
9964
9965 case IEMMODE_64BIT:
9966 IEM_MC_BEGIN(3, 2);
9967 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9968 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
9969 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9970 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9971
9972 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
9973 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9974 IEM_MC_FETCH_EFLAGS(EFlags);
9975 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
9976
9977 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9978 IEM_MC_COMMIT_EFLAGS(EFlags);
9979 IEM_MC_ADVANCE_RIP();
9980 IEM_MC_END();
9981 return VINF_SUCCESS;
9982
9983 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9984 }
9985 }
9986}
9987
9988
/** Opcode 0xd2 - Group 2 Eb,CL (rotate/shift byte r/m by CL). */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is an undefined encoding (#UD) */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,    0);
        IEM_MC_ARG(uint8_t,     cShiftArg, 1);
        IEM_MC_ARG(uint32_t *,  pEFlags,   2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10046
10047
/** Opcode 0xd3 - Group 2 Ev,CL (rotate/shift word/dword/qword r/m by CL). */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is an undefined encoding (#UD) */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10182
/** Opcode 0xd4 - AAM Ib (ASCII adjust AX after multiply).
 * Invalid in 64-bit mode; an immediate divisor of zero raises \#DE before
 * deferring to the C implementation. */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC("aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR(); /* AAM with imm8=0 divides by zero -> #DE. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
10194
10195
/** Opcode 0xd5 - AAD Ib (ASCII adjust AX before division).
 * Invalid in 64-bit mode.  Unlike AAM, a zero immediate is legal (it only
 * multiplies), so no divide-error check is needed here. */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC("aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
10205
10206
/** Opcode 0xd7 - XLAT/XLATB.
 * Loads AL from the byte at DS:[(r/e)BX + zero-extended AL], honouring any
 * segment override (pIemCpu->iEffSeg) and the effective address size.
 * NOTE(review): IEM_MC_BEGIN(2, 0) but the bodies declare two locals and no
 * args; per the IEM_MC_BEGIN(cArgs, cLocals) convention used elsewhere in
 * this file the counts look swapped -- verify (harmless if the macro ignores
 * them in this build). */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC("xlat");
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pIemCpu->iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pIemCpu->iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10253
10254
10255/**
10256 * Common worker for FPU instructions working on ST0 and STn, and storing the
10257 * result in ST0.
10258 *
10259 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10260 */
10261FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
10262{
10263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10264
10265 IEM_MC_BEGIN(3, 1);
10266 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10267 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10268 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10269 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10270
10271 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10272 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10273 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
10274 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
10275 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
10276 IEM_MC_ELSE()
10277 IEM_MC_FPU_STACK_UNDERFLOW(0);
10278 IEM_MC_ENDIF();
10279 IEM_MC_ADVANCE_RIP();
10280
10281 IEM_MC_END();
10282 return VINF_SUCCESS;
10283}
10284
10285
10286/**
10287 * Common worker for FPU instructions working on ST0 and STn, and only affecting
10288 * flags.
10289 *
10290 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10291 */
10292FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
10293{
10294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10295
10296 IEM_MC_BEGIN(3, 1);
10297 IEM_MC_LOCAL(uint16_t, u16Fsw);
10298 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10299 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10300 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10301
10302 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10303 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10304 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
10305 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
10306 IEM_MC_UPDATE_FSW(u16Fsw);
10307 IEM_MC_ELSE()
10308 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
10309 IEM_MC_ENDIF();
10310 IEM_MC_ADVANCE_RIP();
10311
10312 IEM_MC_END();
10313 return VINF_SUCCESS;
10314}
10315
10316
10317/**
10318 * Common worker for FPU instructions working on ST0 and STn, only affecting
10319 * flags, and popping when done.
10320 *
10321 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10322 */
10323FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
10324{
10325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10326
10327 IEM_MC_BEGIN(3, 1);
10328 IEM_MC_LOCAL(uint16_t, u16Fsw);
10329 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10330 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10331 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10332
10333 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10334 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10335 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
10336 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
10337 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
10338 IEM_MC_ELSE()
10339 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
10340 IEM_MC_ENDIF();
10341 IEM_MC_ADVANCE_RIP();
10342
10343 IEM_MC_END();
10344 return VINF_SUCCESS;
10345}
10346
10347
/** Opcode 0xd8 11/0 - FADD ST(0),ST(i): ST(0) += ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
10354
10355
/** Opcode 0xd8 11/1 - FMUL ST(0),ST(i): ST(0) *= ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
10362
10363
/** Opcode 0xd8 11/2 - FCOM ST(0),ST(i): compare, flags only (no store). */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
10370
10371
/** Opcode 0xd8 11/3 - FCOMP ST(0),ST(i): compare, flags only, then pop.
 * Reuses the FCOM assembly worker; only the pop behaviour differs. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
10378
10379
/** Opcode 0xd8 11/4 - FSUB ST(0),ST(i): ST(0) -= ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
10386
10387
/** Opcode 0xd8 11/5 - FSUBR ST(0),ST(i): reversed subtract, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
10394
10395
/** Opcode 0xd8 11/6 - FDIV ST(0),ST(i): ST(0) /= ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
10402
10403
/** Opcode 0xd8 11/7 - FDIVR ST(0),ST(i): reversed divide, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
10410
10411
10412/**
10413 * Common worker for FPU instructions working on ST0 and an m32r, and storing
10414 * the result in ST0.
10415 *
10416 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10417 */
10418FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
10419{
10420 IEM_MC_BEGIN(3, 3);
10421 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10422 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10423 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
10424 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10425 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10426 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
10427
10428 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
10429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10430
10431 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10432 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10433 IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
10434
10435 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
10436 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
10437 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
10438 IEM_MC_ELSE()
10439 IEM_MC_FPU_STACK_UNDERFLOW(0);
10440 IEM_MC_ENDIF();
10441 IEM_MC_ADVANCE_RIP();
10442
10443 IEM_MC_END();
10444 return VINF_SUCCESS;
10445}
10446
10447
/** Opcode 0xd8 !11/0 - FADD ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
10454
10455
/** Opcode 0xd8 !11/1 - FMUL ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
10462
10463
/** Opcode 0xd8 !11/2 - FCOM ST(0),m32real.
 * Open-coded (rather than using iemOpHlpFpu_st0_m32r) because the worker
 * returns only an FSW and the FSW update / underflow records carry the memory
 * operand (segment + address) for FPU data-pointer bookkeeping. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10495
10496
/** Opcode 0xd8 !11/3 - FCOMP ST(0),m32real.
 * Same as iemOp_fcom_m32r but uses the *_THEN_POP update/underflow variants
 * so ST0 is popped afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10528
10529
/** Opcode 0xd8 !11/4 - FSUB ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
10536
10537
/** Opcode 0xd8 !11/5 - FSUBR ST(0),m32real (reversed operands). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
10544
10545
/** Opcode 0xd8 !11/6 - FDIV ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
10552
10553
/** Opcode 0xd8 !11/7 - FDIVR ST(0),m32real (reversed operands). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
10560
10561
/** Opcode 0xd8 - x87 escape group F0.
 * Records the FPU opcode offset for FOP bookkeeping, then dispatches on the
 * ModR/M reg field: register form (mod == 3) works on ST(0),ST(i); memory
 * form works on ST(0) and an m32real operand. */
FNIEMOP_DEF(iemOp_EscF0)
{
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1; /* the 0xd8 byte just consumed */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10599
10600
/** Opcode 0xd9 /0 mem32real - FLD m32real.
 * Converts the 32-bit real to 80-bit and pushes it.  The free-slot check is
 * on register 7 (the slot ST would occupy after the push); a full stack
 * records push-overflow with the memory operand instead.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10632
10633
/** Opcode 0xd9 !11/2 mem32real - FST m32real.
 * Maps the destination for writing, converts ST0 to 32-bit real and commits
 * only if the conversion FSW allows it.  On an empty ST0, a negative QNaN is
 * stored when the invalid-operation exception is masked (FCW.IM), matching
 * real hardware's masked-underflow response. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10667
10668
/** Opcode 0xd9 !11/3 - FSTP m32real.
 * Identical to iemOp_fst_m32r except the FSW update / underflow records use
 * the *_THEN_POP variants so ST0 is popped afterwards. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10702
10703
/** Opcode 0xd9 !11/4 - FLDENV m14/28byte.
 * Defers to the C implementation; the environment image size depends on the
 * effective operand size, which is passed along. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize,  0);
    IEM_MC_ARG_CONST(uint8_t,           iEffSeg,      /*=*/ pIemCpu->iEffSeg,       1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
10719
10720
10721/** Opcode 0xd9 !11/5 */
10722FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
10723{
10724 IEMOP_MNEMONIC("fldcw m2byte");
10725 IEM_MC_BEGIN(1, 1);
10726 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10727 IEM_MC_ARG(uint16_t, u16Fsw, 0);
10728 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
10729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10730 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10731 IEM_MC_FETCH_MEM_U16(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
10732 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
10733 IEM_MC_END();
10734 return VINF_SUCCESS;
10735}
10736
10737
/** Opcode 0xd9 !11/6 - FNSTENV m14/28byte.
 * Defers to the C implementation with the effective operand size (selects the
 * 14- vs 28-byte environment image).
 * NOTE(review): the mnemonic string says "fstenv" while the function
 * implements the no-wait form fnstenv -- harmless for disassembly logs, but
 * consider aligning the string with the function name. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize,  0);
    IEM_MC_ARG_CONST(uint8_t,           iEffSeg,      /*=*/ pIemCpu->iEffSeg,       1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
10753
10754
10755/** Opcode 0xd9 !11/7 */
10756FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
10757{
10758 IEMOP_MNEMONIC("fnstcw m2byte");
10759 IEM_MC_BEGIN(2, 0);
10760 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10761 IEM_MC_LOCAL(uint16_t, u16Fcw);
10762 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
10763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10764 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10765 IEM_MC_FETCH_FSW(u16Fcw);
10766 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Fcw);
10767 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
10768 IEM_MC_END();
10769 return VINF_SUCCESS;
10770}
10771
10772
/** Opcode 0xd9 0xc9, 0xd9 0xd8-0xdf, ++? - FNOP.
 * Does nothing except the usual \#NM/\#MF checks and FPU opcode/IP
 * bookkeeping. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC("fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
10789
10790
/** Opcode 0xd9 11/0 stN - FLD ST(i).
 * Pushes a copy of ST(i) onto the stack.  If ST(i) is empty a push-underflow
 * is recorded instead. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
10815
10816
/** Opcode 0xd9 11/3 stN - FXCH ST(i).
 * Swaps ST(0) and ST(i): ST(i)'s value goes into ST(0) via the FPU result
 * (with C1 set) while ST(0)'s old value is written to ST(i) directly.  The
 * both-empty/one-empty case is handled by a dedicated C worker. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_CONST(uint8_t,           iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
10844
10845
/** Opcode 0xd9 11/4, 0xdd 11/2 - FSTP ST(i).
 * Copies ST(0) to ST(i) and pops.  The iDstReg == 0 special case is a
 * copy-onto-itself that reduces to a pure pop. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t,        u16Fsw, /*=*/ 0); /* no status changes, just pop */
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10886
10887
10888/**
10889 * Common worker for FPU instructions working on ST0 and replaces it with the
10890 * result, i.e. unary operators.
10891 *
10892 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10893 */
10894FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
10895{
10896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10897
10898 IEM_MC_BEGIN(2, 1);
10899 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10900 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10901 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
10902
10903 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10904 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10905 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
10906 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
10907 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
10908 IEM_MC_ELSE()
10909 IEM_MC_FPU_STACK_UNDERFLOW(0);
10910 IEM_MC_ENDIF();
10911 IEM_MC_ADVANCE_RIP();
10912
10913 IEM_MC_END();
10914 return VINF_SUCCESS;
10915}
10916
10917
/** Opcode 0xd9 0xe0 - FCHS: negate the sign of ST(0). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC("fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
10924
10925
/** Opcode 0xd9 0xe1 - FABS: ST(0) = |ST(0)|. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC("fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
10932
10933
10934/**
10935 * Common worker for FPU instructions working on ST0 and only returns FSW.
10936 *
10937 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10938 */
10939FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
10940{
10941 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10942
10943 IEM_MC_BEGIN(2, 1);
10944 IEM_MC_LOCAL(uint16_t, u16Fsw);
10945 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10946 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
10947
10948 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10949 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10950 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
10951 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
10952 IEM_MC_UPDATE_FSW(u16Fsw);
10953 IEM_MC_ELSE()
10954 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
10955 IEM_MC_ENDIF();
10956 IEM_MC_ADVANCE_RIP();
10957
10958 IEM_MC_END();
10959 return VINF_SUCCESS;
10960}
10961
10962
/** Opcode 0xd9 0xe4 - FTST: compare ST(0) against 0.0, flags only. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC("ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
10969
10970
/** Opcode 0xd9 0xe5 - FXAM: classify ST(0) into C0-C3, flags only.
 * NOTE(review): unlike most FPU ops, architectural FXAM reports "empty" via
 * the condition codes rather than underflowing; the shared worker's
 * empty-register path is a stack-underflow record -- verify against the
 * assembly implementation / SDM. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC("fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
10977
10978
10979/**
10980 * Common worker for FPU instructions pushing a constant onto the FPU stack.
10981 *
10982 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10983 */
10984FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
10985{
10986 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10987
10988 IEM_MC_BEGIN(1, 1);
10989 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10990 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10991
10992 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10993 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10994 IEM_MC_IF_FPUREG_IS_EMPTY(7)
10995 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
10996 IEM_MC_PUSH_FPU_RESULT(FpuRes);
10997 IEM_MC_ELSE()
10998 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
10999 IEM_MC_ENDIF();
11000 IEM_MC_ADVANCE_RIP();
11001
11002 IEM_MC_END();
11003 return VINF_SUCCESS;
11004}
11005
11006
/** Opcode 0xd9 0xe8 - FLD1: push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC("fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
11013
11014
/** Opcode 0xd9 0xe9 - FLDL2T: push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC("fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
11021
11022
/** Opcode 0xd9 0xea - FLDL2E: push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC("fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
11029
/** Opcode 0xd9 0xeb - FLDPI: push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC("fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
11036
11037
/** Opcode 0xd9 0xec - FLDLG2: push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC("fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
11044
/** Opcode 0xd9 0xed - FLDLN2: push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC("fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
11051
11052
/** Opcode 0xd9 0xee - FLDZ: push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC("fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
11059
11060
/** Opcode 0xd9 0xf0 - F2XM1: ST(0) = 2^ST(0) - 1 (unary, in place). */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC("f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
11067
11068
11069/** Opcode 0xd9 0xf1. */
11070FNIEMOP_DEF(iemOp_fylx2)
11071{
11072 IEMOP_MNEMONIC("fylx2 st0");
11073 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fyl2x_r80);
11074}
11075
11076
11077/**
11078 * Common worker for FPU instructions working on ST0 and having two outputs, one
11079 * replacing ST0 and one pushed onto the stack.
11080 *
11081 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11082 */
11083FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
11084{
11085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11086
11087 IEM_MC_BEGIN(2, 1);
11088 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
11089 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
11090 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
11091
11092 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11093 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11094 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
11095 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
11096 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
11097 IEM_MC_ELSE()
11098 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
11099 IEM_MC_ENDIF();
11100 IEM_MC_ADVANCE_RIP();
11101
11102 IEM_MC_END();
11103 return VINF_SUCCESS;
11104}
11105
11106
/** Opcode 0xd9 0xf2 - FPTAN: replaces ST(0) and pushes a second result
 * (the worker produces two values per its PFNIEMAIMPLFPUR80UNARYTWO type). */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC("fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
11113
11114
11115/**
11116 * Common worker for FPU instructions working on STn and ST0, storing the result
11117 * in STn, and popping the stack unless IE, DE or ZE was raised.
11118 *
11119 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11120 */
11121FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
11122{
11123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11124
11125 IEM_MC_BEGIN(3, 1);
11126 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11127 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11128 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11129 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
11130
11131 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11132 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11133
11134 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
11135 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
11136 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
11137 IEM_MC_ELSE()
11138 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
11139 IEM_MC_ENDIF();
11140 IEM_MC_ADVANCE_RIP();
11141
11142 IEM_MC_END();
11143 return VINF_SUCCESS;
11144}
11145
11146
/** Opcode 0xd9 0xf3. FPATAN - arctan(ST(1)/ST(0)) into ST(1), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC("fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
11153
11154
/** Opcode 0xd9 0xf4. FXTRACT - split ST(0) into exponent and significand. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC("fxtract st0");
    /* Replaces ST(0) and pushes a second result, hence the replace+push helper. */
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
11161
11162
/** Opcode 0xd9 0xf5. FPREM1 - IEEE partial remainder of ST(0)/ST(1). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC("fprem1 st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
11169
11170
/** Opcode 0xd9 0xf6. FDECSTP - decrement the FPU stack TOP pointer. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC("fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Adding 7 is -1 modulo the 3-bit TOP field (cf. fincstp adding 1). */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_fpu_AddToTop, 7);
}
11178
11179
/** Opcode 0xd9 0xf7. FINCSTP - increment the FPU stack TOP pointer. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC("fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_fpu_AddToTop, 1);
}
11187
11188
/** Opcode 0xd9 0xf8. FPREM - partial remainder (truncating) of ST(0)/ST(1). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC("fprem st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
11195
11196
/** Opcode 0xd9 0xf9. FYL2XP1 - ST(1)*log2(ST(0)+1) into ST(1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC("fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
11203
11204
/** Opcode 0xd9 0xfa. FSQRT - square root of ST(0). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC("fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
11211
11212
/** Opcode 0xd9 0xfb. FSINCOS - sine into ST(0), cosine pushed. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC("fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
11219
11220
/** Opcode 0xd9 0xfc. FRNDINT - round ST(0) to integer (per FCW rounding mode). */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC("frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
11227
11228
/** Opcode 0xd9 0xfd. FSCALE - scale ST(0) by 2^trunc(ST(1)). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC("fscale st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
11235
11236
/** Opcode 0xd9 0xfe. FSIN - sine of ST(0). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC("fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
11243
11244
/** Opcode 0xd9 0xff. FCOS - cosine of ST(0). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC("fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
11251
11252
/** Used by iemOp_EscF1 for register-form 0xd9 with ModR/M bytes 0xe0..0xff;
 *  indexed by (ModR/M byte - 0xe0). */
static const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fylx2,    /* FYL2X; handler name carries a historical typo. */
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
11289
11290
/** Opcode 0xd9. x87 escape group: dispatches on mod and reg of the ModR/M byte. */
FNIEMOP_DEF(iemOp_EscF1)
{
    /* Remember where the FPU opcode starts so FOP can be updated later. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms (mod == 3). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                /* Only 0xd9 0xc9 (FNOP) is defined in this group. */
                if (bRm == 0xc9)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* reg >= 4 implies bRm >= 0xe0; use the 32-entry table. */
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[(bRm & (X86_MODRM_REG_MASK |X86_MODRM_RM_MASK)) - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms (mod != 3). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11331
11332
/** Opcode 0xda 11/0. FCMOVB - copy ST(i) to ST(0) if CF is set. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(i) must be occupied; move is conditional on EFLAGS.CF. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11358
11359
/** Opcode 0xda 11/1. FCMOVE - copy ST(i) to ST(0) if ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11385
11386
/** Opcode 0xda 11/2. FCMOVBE - copy ST(i) to ST(0) if CF or ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11412
11413
/** Opcode 0xda 11/3. FCMOVU - copy ST(i) to ST(0) if PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11439
11440
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping twice when done.
 *
 * The assembly worker only produces an FSW value; no register is written.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Compares ST(0) with ST(1); stack underflow is signalled if either is empty. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11470
11471
/** Opcode 0xda 0xe9. FUCOMPP - unordered compare ST(0) with ST(1), pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC("fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
11478
11479
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * Calculates the effective address first, then fetches the 32-bit signed
 * integer operand before checking the register stack.
 *
 * @param   bRm         The ModR/M byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11514
11515
/** Opcode 0xda !11/0. FIADD - add m32int to ST(0). */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
11522
11523
/** Opcode 0xda !11/1. FIMUL - multiply ST(0) by m32int. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
11530
11531
/** Opcode 0xda !11/2. FICOM - compare ST(0) with m32int (flags only). */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Only FSW is updated; DS:FPUDP get the memory operand location. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11563
11564
/** Opcode 0xda !11/3. FICOMP - compare ST(0) with m32int, then pop. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Same as FICOM but pops the stack afterwards (also on underflow). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11596
11597
/** Opcode 0xda !11/4. FISUB - subtract m32int from ST(0). */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
11604
11605
/** Opcode 0xda !11/5. FISUBR - reverse subtract: ST(0) = m32int - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
11612
11613
/** Opcode 0xda !11/6. FIDIV - divide ST(0) by m32int. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
11620
11621
/** Opcode 0xda !11/7. FIDIVR - reverse divide: ST(0) = m32int / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
11628
11629
/** Opcode 0xda. x87 escape group: FCMOVcc / FUCOMPP register forms, m32int memory forms. */
FNIEMOP_DEF(iemOp_EscF2)
{
    /* Remember where the FPU opcode starts so FOP can be updated later. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms (mod == 3). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                /* Only 0xda 0xe9 (FUCOMPP) is defined in this group. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms (mod != 3): all take a 32-bit signed integer operand. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11669
11670
/** Opcode 0xdb !11/0. FILD - push m32int converted to 80-bit real. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* A push requires the register below TOP (ST7 relative) to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11701
11702
/** Opcode 0xdb !11/1. FISTTP - store ST(0) as m32int with truncation, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; commit only succeeds if no unmasked xcpt. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, write the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11736
11737
/** Opcode 0xdb !11/2. FIST - store ST(0) as m32int (rounded per FCW), no pop. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, write the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11771
11772
11773/** Opcode 0xdb !11/3. */
11774FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
11775{
11776 IEMOP_MNEMONIC("fisttp m32i");
11777 IEM_MC_BEGIN(3, 2);
11778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11779 IEM_MC_LOCAL(uint16_t, u16Fsw);
11780 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11781 IEM_MC_ARG(int32_t *, pi32Dst, 1);
11782 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
11783
11784 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
11785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11786 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11787 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11788
11789 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
11790 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
11791 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
11792 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
11793 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
11794 IEM_MC_ELSE()
11795 IEM_MC_IF_FCW_IM()
11796 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
11797 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
11798 IEM_MC_ENDIF();
11799 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
11800 IEM_MC_ENDIF();
11801 IEM_MC_ADVANCE_RIP();
11802
11803 IEM_MC_END();
11804 return VINF_SUCCESS;
11805}
11806
11807
/** Opcode 0xdb !11/5. FLD - push m80real onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* A push requires the register below TOP (ST7 relative) to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11838
11839
/** Opcode 0xdb !11/7. FSTP - store ST(0) to m80real, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, write the real indefinite QNaN. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11873
11874
/** Opcode 0xdb 11/0. FCMOVNB - copy ST(i) to ST(0) if CF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11900
11901
/** Opcode 0xdb 11/1. FCMOVNE - copy ST(i) to ST(0) if ZF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11927
11928
/** Opcode 0xdb 11/2. FCMOVNBE - copy ST(i) to ST(0) if both CF and ZF are clear. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11954
11955
11956/** Opcode 0xdb 11/3. */
11957FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
11958{
11959 IEMOP_MNEMONIC("fcmovnnu st0,stN");
11960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11961
11962 IEM_MC_BEGIN(0, 1);
11963 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
11964
11965 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11966 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11967
11968 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
11969 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
11970 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
11971 IEM_MC_ENDIF();
11972 IEM_MC_UPDATE_FPU_OPCODE_IP();
11973 IEM_MC_ELSE()
11974 IEM_MC_FPU_STACK_UNDERFLOW(0);
11975 IEM_MC_ENDIF();
11976 IEM_MC_ADVANCE_RIP();
11977
11978 IEM_MC_END();
11979 return VINF_SUCCESS;
11980}
11981
11982
/** Opcode 0xdb 0xe0. FNENI - 8087 interrupt enable; a NOP on later FPUs. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC("fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11994
11995
/** Opcode 0xdb 0xe1. FNDISI - 8087 interrupt disable; a NOP on later FPUs. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC("fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12007
12008
/** Opcode 0xdb 0xe2. FNCLEX - clear FPU exception flags without checking for pending exceptions. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC("fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12022
12023
/** Opcode 0xdb 0xe3. FNINIT - initialize the FPU without checking for pending exceptions. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC("fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
12031
12032
/** Opcode 0xdb 0xe4. FNSETPM - 80287 only; treated as a NOP here. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC("fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12044
12045
/** Opcode 0xdb 0xe5. FRSTPM - 80287XL only; raises \#UD here (newer CPU behavior). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC("frstpm (80287XL/ign)");   /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
12061
12062
/** Opcode 0xdb 11/5. FUCOMI - unordered compare ST(0) with ST(i), sets EFLAGS. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
12069
12070
/** Opcode 0xdb 11/6. FCOMI - compare ST(0) with ST(i), sets EFLAGS. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
12077
12078
12079/** Opcode 0xdb. */
12080FNIEMOP_DEF(iemOp_EscF3)
12081{
12082 pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
12083 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12084 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12085 {
12086 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
12087 {
12088 case 0: FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
12089 case 1: FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
12090 case 2: FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
12091 case 3: FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
12092 case 4:
12093 switch (bRm)
12094 {
12095 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
12096 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
12097 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
12098 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
12099 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
12100 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
12101 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
12102 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
12103 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12104 }
12105 break;
12106 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
12107 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
12108 case 7: return IEMOP_RAISE_INVALID_OPCODE();
12109 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12110 }
12111 }
12112 else
12113 {
12114 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
12115 {
12116 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
12117 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
12118 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
12119 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
12120 case 4: return IEMOP_RAISE_INVALID_OPCODE();
12121 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
12122 case 6: return IEMOP_RAISE_INVALID_OPCODE();
12123 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
12124 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12125 }
12126 }
12127}
12128
12129
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * @param   bRm         The ModR/M byte (register form; R/M selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Operand order: pr80Value1 <- STn (destination), pr80Value2 <- ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        /* Either register empty -> stack underflow handling for STn. */
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12160
12161
/** Opcode 0xdc 11/0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd stN,st0");
    /* ST(i) = ST(i) + ST(0); result stored in ST(i) by the common worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
12168
12169
/** Opcode 0xdc 11/1. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul stN,st0");
    /* ST(i) = ST(i) * ST(0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
12176
12177
/** Opcode 0xdc 11/4. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr stN,st0");
    /* Reversed subtract variant; assembly worker handles the operand swap. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
12184
12185
/** Opcode 0xdc 11/5. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub stN,st0");
    /* ST(i) = ST(i) - ST(0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
12192
12193
/** Opcode 0xdc 11/6. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr stN,st0");
    /* Reversed divide variant; assembly worker handles the operand swap. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
12200
12201
/** Opcode 0xdc 11/7. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv stN,st0");
    /* ST(i) = ST(i) / ST(0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
12208
12209
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * @param   bRm         The ModR/M byte (memory form).
 * @param   pfnImpl     Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    /* Effective address first, then finish decoding before raising anything. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Factor2, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        /* Store into ST0, recording the memory operand for FDP/FDS. */
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12243
12244
/** Opcode 0xdc !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd m64r");
    /* ST0 = ST0 + m64real. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
12251
12252
/** Opcode 0xdc !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul m64r");
    /* ST0 = ST0 * m64real. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
12259
12260
/** Opcode 0xdc !11/2. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m64r");

    /* Compare ST0 with a 64-bit real from memory; only FSW is updated, no
       result is stored and the stack is not popped. */
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* UINT8_MAX: no destination register is involved in the underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12292
12293
/** Opcode 0xdc !11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m64r");

    /* Same as FCOM m64r above, but pops ST0 afterwards (the _THEN_POP
       FSW/underflow variants below make the difference). */
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12325
12326
/** Opcode 0xdc !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub m64r");
    /* ST0 = ST0 - m64real. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
12333
12334
/** Opcode 0xdc !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr m64r");
    /* Reversed subtract; the assembly worker swaps the operands. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
12341
12342
/** Opcode 0xdc !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv m64r");
    /* ST0 = ST0 / m64real. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
12349
12350
/** Opcode 0xdc !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr m64r");
    /* Reversed divide; the assembly worker swaps the operands. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
12357
12358
/** Opcode 0xdc. */
FNIEMOP_DEF(iemOp_EscF4)
{
    /* Record the FPU opcode offset for FOP/FPUIP bookkeeping. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: arithmetic with ST(i) as destination. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: ST0 with a 64-bit real operand. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12395
12396
/** Opcode 0xdd !11/0.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pIemCpu->iEffSeg, GCPtrEffSrc);
    /* Pushing requires the register that will become the new ST0, i.e. the
       current ST7, to be empty; otherwise it is a push overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val)
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12427
12428
/** Opcode 0xdd !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m64i");
    /* Truncating store of ST0 as a 64-bit integer, then pop. */
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        /* Commit only if the store didn't raise an unmasked exception. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with IM masked, write the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12462
12463
/** Opcode 0xdd !11/2. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m64r");
    /* Store ST0 to memory as a 64-bit real; no pop. */
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with IM masked, store the negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12497
12498
12499
12500
/** Opcode 0xdd !11/3. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m64r");
    /* Like FST m64r above, but pops ST0 afterwards (_THEN_POP variants). */
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12534
12535
/** Opcode 0xdd !11/4. (The iemOp_EscF5 decoder dispatches /4 here.) */
FNIEMOP_STUB_1(iemOp_frstor, uint8_t, bRm);

/** Opcode 0xdd !11/6. (The iemOp_EscF5 decoder dispatches /6 here.) */
FNIEMOP_STUB_1(iemOp_fnsave, uint8_t, bRm);

/** Opcode 0xdd !11/7. (The iemOp_EscF5 decoder dispatches /7 here.) */
FNIEMOP_STUB_1(iemOp_fnstsw, uint8_t, bRm);

/** Opcode 0xdd 11/0. */
FNIEMOP_STUB_1(iemOp_ffree_stN, uint8_t, bRm);
12547
12548
/** Opcode 0xdd 11/2. (The iemOp_EscF5 decoder dispatches /2 here.) */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Copy ST0 into ST(i) by wrapping the referenced value in a result with a
       zero FSW delta and storing it at the destination register. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12570
12571
12572/** Opcode 0xdd 11/3. */
12573FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
12574{
12575 IEMOP_MNEMONIC("fcom st0,stN");
12576 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
12577}
12578
12579
12580/** Opcode 0xdd 11/4. */
12581FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
12582{
12583 IEMOP_MNEMONIC("fcomp st0,stN");
12584 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
12585}
12586
12587
/** Opcode 0xdd. */
FNIEMOP_DEF(iemOp_EscF5)
{
    /* Record the FPU opcode offset for FOP/FPUIP bookkeeping. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: 64-bit real / 64-bit integer / state save-restore. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12624
12625
/** Opcode 0xde 11/0. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("faddp stN,st0");
    /* ST(i) = ST(i) + ST(0), then pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
12632
12633
/** Opcode 0xde 11/1. (The iemOp_EscF6 decoder dispatches /1 here.) */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmulp stN,st0");
    /* ST(i) = ST(i) * ST(0), then pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
12640
12641
12642/** Opcode 0xde 0xd9. */
12643FNIEMOP_DEF(iemOp_fcompp)
12644{
12645 IEMOP_MNEMONIC("fucompp st0,stN");
12646 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
12647}
12648
12649
/** Opcode 0xde 11/4. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubrp stN,st0");
    /* Reversed subtract into ST(i), then pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
12656
12657
/** Opcode 0xde 11/5. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubp stN,st0");
    /* ST(i) = ST(i) - ST(0), then pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
12664
12665
/** Opcode 0xde 11/6. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivrp stN,st0");
    /* Reversed divide into ST(i), then pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
12672
12673
/** Opcode 0xde 11/7. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivp stN,st0");
    /* ST(i) = ST(i) / ST(0), then pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
12680
12681
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        /* Result always goes to ST0 (register index 0). */
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12716
12717
/** Opcode 0xde !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m16i");
    /* ST0 = ST0 + (int16_t)m16int. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
12724
12725
/** Opcode 0xde !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m16i");
    /* ST0 = ST0 * (int16_t)m16int. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
12732
12733
/** Opcode 0xde !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m16i");

    /* Compare ST0 against a 16-bit integer; FSW only, no store, no pop. */
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12765
12766
/** Opcode 0xde !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m16i");

    /* Same as FICOM m16i above, but pops ST0 (_THEN_POP variants). */
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12798
12799
/** Opcode 0xde !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m16i");
    /* ST0 = ST0 - (int16_t)m16int. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
12806
12807
/** Opcode 0xde !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m16i");
    /* Reversed subtract; the assembly worker swaps the operands. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
12814
12815
12816/** Opcode 0xde !11/6. */
12817FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
12818{
12819 IEMOP_MNEMONIC("fiadd m16i");
12820 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
12821}
12822
12823
12824/** Opcode 0xde !11/7. */
12825FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
12826{
12827 IEMOP_MNEMONIC("fiadd m16i");
12828 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
12829}
12830
12831
/** Opcode 0xde. */
FNIEMOP_DEF(iemOp_EscF6)
{
    /* Record the FPU opcode offset for FOP/FPUIP bookkeeping. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: the popping arithmetic instructions. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    /* Only 0xd9 (FCOMPP) is defined in the /3 row. */
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: 16-bit integer operands. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12870
12871
/** Opcode 0xdf 11/0. (Stub - not implemented yet.) */
FNIEMOP_STUB_1(iemOp_ffreep_stN, uint8_t, bRm);
12874
12875
/** Opcode 0xdf 0xe0. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC("fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Copies the FPU status word into AX.  Note: only #NM is checked here -
       there is no IEM_MC_MAYBE_RAISE_FPU_XCPT, FNSTSW being a no-wait
       instruction. */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12891
12892
12893/** Opcode 0xdf 11/5. */
12894FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
12895{
12896 IEMOP_MNEMONIC("fcomip st0,stN");
12897 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
12898}
12899
12900
/** Opcode 0xdf 11/6. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomip st0,stN");
    /* Ordered compare setting EFLAGS, then pop (fPop=true). */
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
12907
12908
/** Opcode 0xdf !11/0. (Stub - not implemented yet.) */
FNIEMOP_STUB_1(iemOp_fild_m16i, uint8_t, bRm);
12911
12912
/** Opcode 0xdf !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m16i");
    /* Truncating store of ST0 as a 16-bit integer, then pop. */
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        /* Commit only if the store didn't raise an unmasked exception. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with IM masked, write the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12946
12947
12948/** Opcode 0xdf !11/2. */
12949FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
12950{
12951 IEMOP_MNEMONIC("fistp m16i");
12952 IEM_MC_BEGIN(3, 2);
12953 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12954 IEM_MC_LOCAL(uint16_t, u16Fsw);
12955 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12956 IEM_MC_ARG(int16_t *, pi16Dst, 1);
12957 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
12958
12959 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
12960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12961 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12962 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12963
12964 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
12965 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
12966 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
12967 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
12968 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
12969 IEM_MC_ELSE()
12970 IEM_MC_IF_FCW_IM()
12971 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
12972 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
12973 IEM_MC_ENDIF();
12974 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
12975 IEM_MC_ENDIF();
12976 IEM_MC_ADVANCE_RIP();
12977
12978 IEM_MC_END();
12979 return VINF_SUCCESS;
12980}
12981
12982
/** Opcode 0xdf !11/3. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m16i");
    /* Store ST0 as a 16-bit integer (rounded per FCW), then pop. */
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with IM masked, write the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13016
13017
/** Opcode 0xdf !11/4. (Stub - not implemented yet.) */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);

/** Opcode 0xdf !11/5. (Stub - not implemented yet.) */
FNIEMOP_STUB_1(iemOp_fild_m64i, uint8_t, bRm);

/** Opcode 0xdf !11/6. (Stub - not implemented yet.) */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
13026
13027
/** Opcode 0xdf !11/7. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m64i");
    /* Store ST0 as a 64-bit integer (rounded per FCW), then pop. */
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with IM masked, write the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13061
13062
/** Opcode 0xdf. */
FNIEMOP_DEF(iemOp_EscF7)
{
    /*
     * FPU escape byte 0xdf.  mod=3 selects the register forms, anything else
     * selects the memory forms; both dispatch on the ModR/M reg field (/0../7).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0)
                        return FNIEMOP_CALL(iemOp_fnstsw_ax); /* 0xdf 0xe0 is the only valid /4 register form. */
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13100
13101
/** Opcode 0xe0. */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    /*
     * LOOPNE/LOOPNZ Jb: decrement the count register and take the short jump
     * when it is non-zero AND ZF is clear.  The count register width (CX/ECX/
     * RCX) follows the effective address size, not the operand size.
     */
    IEMOP_MNEMONIC("loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13148
13149
/** Opcode 0xe1. */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    /*
     * LOOPE/LOOPZ Jb: decrement the count register and take the short jump
     * when it is non-zero AND ZF is set.  Count register width follows the
     * effective address size (CX/ECX/RCX).
     */
    IEMOP_MNEMONIC("loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13196
13197
/** Opcode 0xe2. */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    /*
     * LOOP Jb: decrement the count register and take the short jump while it
     * is non-zero; no flag condition.  Count register width follows the
     * effective address size (CX/ECX/RCX).
     */
    IEMOP_MNEMONIC("loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13247
13248
/** Opcode 0xe3. */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    /*
     * JCXZ/JECXZ/JRCXZ Jb: jump when the count register is zero; the register
     * checked follows the effective address size.  Note the inverted branch
     * structure: the IF arm handles the non-zero (fall through) case.
     */
    IEMOP_MNEMONIC("jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13292
13293
13294/** Opcode 0xe4 */
13295FNIEMOP_DEF(iemOp_in_AL_Ib)
13296{
13297 IEMOP_MNEMONIC("in eAX,Ib");
13298 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13299 IEMOP_HLP_NO_LOCK_PREFIX();
13300 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
13301}
13302
13303
/** Opcode 0xe5 */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    /*
     * IN eAX, imm8 - read a word or dword (per the effective operand size)
     * from the immediate I/O port into AX/EAX; deferred to iemCImpl_in.
     */
    IEMOP_MNEMONIC("in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2 is handed the access width in bytes. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
13312
13313
/** Opcode 0xe6 */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    /*
     * OUT imm8, AL - write the byte in AL to the immediate I/O port;
     * checks and the port write are done by iemCImpl_out.
     */
    IEMOP_MNEMONIC("out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
13322
13323
/** Opcode 0xe7 */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    /*
     * OUT imm8, eAX - write AX/EAX (per the effective operand size) to the
     * immediate I/O port; deferred to iemCImpl_out with the width in bytes.
     */
    IEMOP_MNEMONIC("out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
13332
13333
/** Opcode 0xe8. */
FNIEMOP_DEF(iemOp_call_Jv)
{
    /*
     * CALL rel16/rel32 - near relative call.  The operand size defaults to
     * 64-bit in long mode; the 64-bit form still fetches a 32-bit immediate
     * and sign-extends it, matching the instruction encoding.
     */
    IEMOP_MNEMONIC("call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* 32-bit immediate sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13362
13363
/** Opcode 0xe9. */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    /*
     * JMP rel16/rel32 - near relative jump.  The 64-bit case shares the
     * 32-bit path: the encoding carries a 32-bit displacement that
     * IEM_MC_REL_JMP_S32 applies to RIP.
     */
    IEMOP_MNEMONIC("jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT: /* deliberate fall-thru: same 32-bit displacement. */
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13393
13394
/** Opcode 0xea. */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    /*
     * JMP ptr16:16/ptr16:32 - direct far jump.  Invalid in 64-bit mode
     * (IEMOP_HLP_NO_64BIT raises #UD there).
     */
    IEMOP_MNEMONIC("jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pIemCpu->enmEffOpSize);
}
13411
13412
/** Opcode 0xeb. */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    /*
     * JMP rel8 - short unconditional jump; unconditional, so no ELSE arm.
     */
    IEMOP_MNEMONIC("jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13426
13427
/** Opcode 0xec */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    /*
     * IN AL, DX - read one byte from the port in DX into AL;
     * deferred to iemCImpl_in_eAX_DX with access width 1.
     */
    IEMOP_MNEMONIC("in AL,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
13435
13436
/** Opcode 0xed */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    /*
     * IN eAX, DX - read AX/EAX-sized data from the port in DX.
     * NOTE(review): the function name is missing the "in_" part present in
     * its siblings (iemOp_in_AL_DX); renaming would require touching the
     * opcode dispatch table, so it is only noted here.
     */
    IEMOP_MNEMONIC("in eAX,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
13444
13445
/** Opcode 0xee */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    /*
     * OUT DX, AL - write the byte in AL to the port in DX;
     * deferred to iemCImpl_out_DX_eAX with access width 1.
     */
    IEMOP_MNEMONIC("out DX,AL");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
13453
13454
/** Opcode 0xef */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    /*
     * OUT DX, eAX - write AX/EAX (per the effective operand size) to the
     * port in DX; deferred to iemCImpl_out_DX_eAX with the width in bytes.
     */
    IEMOP_MNEMONIC("out DX,eAX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
13462
13463
/** Opcode 0xf0. */
FNIEMOP_DEF(iemOp_lock)
{
    /*
     * LOCK prefix: record it in the prefix state and decode the next opcode
     * byte through the one-byte dispatch table.  Whether LOCK is actually
     * legal is decided by the prefixed instruction's handler.
     */
    pIemCpu->fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
13472
13473
/** Opcode 0xf2. */
FNIEMOP_DEF(iemOp_repne)
{
    /*
     * REPNE/REPNZ prefix: set the REPNZ prefix flag and continue decoding.
     * Only one of REPZ/REPNZ can be in effect at a time.
     */
    /* This overrides any previous REPE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPZ;
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPNZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
13484
13485
/** Opcode 0xf3. */
FNIEMOP_DEF(iemOp_repe)
{
    /*
     * REP/REPE/REPZ prefix: set the REPZ prefix flag and continue decoding.
     * Only one of REPZ/REPNZ can be in effect at a time.
     */
    /* This overrides any previous REPNE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPNZ;
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
13496
13497
13498/** Opcode 0xf4. */
13499FNIEMOP_DEF(iemOp_hlt)
13500{
13501 IEMOP_HLP_NO_LOCK_PREFIX();
13502 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
13503}
13504
13505
/** Opcode 0xf5. */
FNIEMOP_DEF(iemOp_cmc)
{
    /*
     * CMC - complement the carry flag; no other flags affected.
     */
    IEMOP_MNEMONIC("cmc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13517
13518
/**
 * Common implementation of 'inc/dec/not/neg Eb'.
 *
 * Byte-sized unary read-modify-write operation.  The register form operates
 * directly on the GPR; the memory form maps the byte read-write and picks
 * the locked implementation variant when a LOCK prefix is present.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* LOCK prefix selects the atomic implementation variant. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13562
13563
/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * Word/dword/qword unary read-modify-write operation.  Register forms are
 * forwarded to iemOpCommonUnaryGReg; the memory forms below map the operand
 * read-write and pick the locked implementation variant when a LOCK prefix
 * is present.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            /* LOCK prefix selects the atomic implementation variant. */
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13642
13643
/** Opcode 0xf6 /0. */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    /*
     * TEST Eb, Ib - AND the byte operand with the immediate and set flags,
     * discarding the result.  Memory is only read (IEM_ACCESS_DATA_R);
     * TEST never writes the destination.
     */
    IEMOP_MNEMONIC("test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The immediate follows the ModR/M displacement, so the effective
           address must be calculated before fetching it. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13691
13692
/** Opcode 0xf7 /0. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    /*
     * TEST Ev, Iv - AND the word/dword/qword operand with the immediate and
     * set flags, discarding the result.  The 64-bit form uses a 32-bit
     * immediate sign-extended to 64 bits, per the encoding.  Memory is only
     * read; TEST never writes the destination.
     */
    IEMOP_MNEMONIC("test Ev,Iv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                /* 32-bit immediate sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The immediate follows the ModR/M bytes, so the effective
                   address must be calculated before fetching it. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13828
13829
/** Opcode 0xf6 /4, /5, /6 and /7. */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    /*
     * Common worker for MUL/IMUL/DIV/IDIV r/m8.  The byte forms operate on
     * AX (implicit destination), so there is only one register pointer
     * argument; the source operand is fetched by value.  Divide errors are
     * raised by the assembly worker via the pIemCpu state, not here.
     */
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pfnU8, pu16AX, u8Value, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13872
13873
/** Opcode 0xf7 /4, /5, /6 and /7. */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    /*
     * Common worker for MUL/IMUL/DIV/IDIV r/m16, r/m32 and r/m64.  These use
     * the AX:DX / EAX:EDX / RAX:RDX register pairs, so the assembly worker
     * takes two register pointers plus the value and eflags.  The worker
     * returns non-zero on a divide error, which is turned into #DE via
     * IEM_MC_RAISE_DIVIDE_ERROR.
     */
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                /* rc != 0 => divide error (#DE). */
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14053
/** Opcode 0xf6. */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    /*
     * Group 3 byte forms, dispatched on the ModR/M reg field:
     * /0 TEST, /1 invalid, /2 NOT, /3 NEG, /4 MUL, /5 IMUL, /6 DIV, /7 IDIV.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
            /* /1 is not defined; raised as #UD via the lock-prefix path. */
            return IEMOP_RAISE_INVALID_LOCK_PREFIX();
        case 2:
            IEMOP_MNEMONIC("not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC("imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC("div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC("idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14089
14090
/** Opcode 0xf7. */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    /* Group 3, word/dword/qword operands: dispatch on the reg field (/0../7).
       Unlike the Eb variant, the mul/div workers take a sizes table
       (g_iemAImpl_*) so the worker can pick the right operand width. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            /* /0: TEST Ev,Iz - handled by a dedicated worker. */
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
            /* /1 is an undefined encoding. NOTE(review): raises the
               invalid-lock-prefix fault rather than #UD - confirm intent
               (same question as iemOp_Grp3_Eb). */
            return IEMOP_RAISE_INVALID_LOCK_PREFIX();
        case 2:
            IEMOP_MNEMONIC("not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Ev");
            /* SF, ZF, AF and PF are architecturally undefined after MUL. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC("imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC("div Ev");
            /* All arithmetic flags are architecturally undefined after DIV/IDIV. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC("idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14126
14127
/** Opcode 0xf8. */
FNIEMOP_DEF(iemOp_clc)
{
    /* CLC - clear the carry flag; no operands, no other flags touched. */
    IEMOP_MNEMONIC("clc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14139
14140
/** Opcode 0xf9. */
FNIEMOP_DEF(iemOp_stc)
{
    /* STC - set the carry flag; no operands, no other flags touched. */
    IEMOP_MNEMONIC("stc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14152
14153
/** Opcode 0xfa. */
FNIEMOP_DEF(iemOp_cli)
{
    /* CLI - privilege/VME checks and IF handling live in the C
       implementation, so defer to it. */
    IEMOP_MNEMONIC("cli");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
14161
14162
/** Opcode 0xfb. */
FNIEMOP_DEF(iemOp_sti)
{
    /* STI - privilege checks and interrupt shadow handling live in the C
       implementation, so defer to it. */
    IEMOP_MNEMONIC("sti");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
14169
14170
/** Opcode 0xfc. */
FNIEMOP_DEF(iemOp_cld)
{
    /* CLD - clear the direction flag; no operands, no other flags touched. */
    IEMOP_MNEMONIC("cld");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14182
14183
/** Opcode 0xfd. */
FNIEMOP_DEF(iemOp_std)
{
    /* STD - set the direction flag; no operands, no other flags touched. */
    IEMOP_MNEMONIC("std");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14195
14196
14197/** Opcode 0xfe. */
14198FNIEMOP_DEF(iemOp_Grp4)
14199{
14200 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14201 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14202 {
14203 case 0:
14204 IEMOP_MNEMONIC("inc Ev");
14205 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
14206 case 1:
14207 IEMOP_MNEMONIC("dec Ev");
14208 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
14209 default:
14210 IEMOP_MNEMONIC("grp4-ud");
14211 return IEMOP_RAISE_INVALID_OPCODE();
14212 }
14213}
14214
14215
/**
 * Opcode 0xff /2.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    /* CALL Ev - near indirect call; target comes from a register or memory
       depending on the mod field. The C implementation pushes the return
       address and updates RIP. */
    IEMOP_MNEMONIC("calln Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is loaded from memory (fixed copy-pasted comment that
           said "register"). */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14297
14298typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
14299
14300FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
14301{
14302 /* Registers? How?? */
14303 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14304 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
14305
14306 /* Far pointer loaded from memory. */
14307 switch (pIemCpu->enmEffOpSize)
14308 {
14309 case IEMMODE_16BIT:
14310 IEM_MC_BEGIN(3, 1);
14311 IEM_MC_ARG(uint16_t, u16Sel, 0);
14312 IEM_MC_ARG(uint16_t, offSeg, 1);
14313 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
14314 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14315 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
14316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14317 IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
14318 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 2);
14319 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
14320 IEM_MC_END();
14321 return VINF_SUCCESS;
14322
14323 case IEMMODE_32BIT:
14324 IEM_MC_BEGIN(3, 1);
14325 IEM_MC_ARG(uint16_t, u16Sel, 0);
14326 IEM_MC_ARG(uint32_t, offSeg, 1);
14327 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
14328 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14329 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
14330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14331 IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
14332 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 4);
14333 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
14334 IEM_MC_END();
14335 return VINF_SUCCESS;
14336
14337 case IEMMODE_64BIT:
14338 IEM_MC_BEGIN(3, 1);
14339 IEM_MC_ARG(uint16_t, u16Sel, 0);
14340 IEM_MC_ARG(uint64_t, offSeg, 1);
14341 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
14342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14343 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
14344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14345 IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
14346 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 8);
14347 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
14348 IEM_MC_END();
14349 return VINF_SUCCESS;
14350
14351 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14352 }
14353}
14354
14355
/**
 * Opcode 0xff /3.
 *
 * CALL Ep - far indirect call through a sel:offset pair in memory; the
 * loading and dispatch is shared with jmpf via iemOpHlp_Grp5_far_Ep.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
14365
14366
14367/**
14368 * Opcode 0xff /4.
14369 * @param bRm The RM byte.
14370 */
14371FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
14372{
14373 IEMOP_MNEMONIC("jmpn Ev");
14374 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
14375 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
14376
14377 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14378 {
14379 /* The new RIP is taken from a register. */
14380 switch (pIemCpu->enmEffOpSize)
14381 {
14382 case IEMMODE_16BIT:
14383 IEM_MC_BEGIN(0, 1);
14384 IEM_MC_LOCAL(uint16_t, u16Target);
14385 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
14386 IEM_MC_SET_RIP_U16(u16Target);
14387 IEM_MC_END()
14388 return VINF_SUCCESS;
14389
14390 case IEMMODE_32BIT:
14391 IEM_MC_BEGIN(0, 1);
14392 IEM_MC_LOCAL(uint32_t, u32Target);
14393 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
14394 IEM_MC_SET_RIP_U32(u32Target);
14395 IEM_MC_END()
14396 return VINF_SUCCESS;
14397
14398 case IEMMODE_64BIT:
14399 IEM_MC_BEGIN(0, 1);
14400 IEM_MC_LOCAL(uint64_t, u64Target);
14401 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
14402 IEM_MC_SET_RIP_U64(u64Target);
14403 IEM_MC_END()
14404 return VINF_SUCCESS;
14405
14406 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14407 }
14408 }
14409 else
14410 {
14411 /* The new RIP is taken from a register. */
14412 switch (pIemCpu->enmEffOpSize)
14413 {
14414 case IEMMODE_16BIT:
14415 IEM_MC_BEGIN(0, 2);
14416 IEM_MC_LOCAL(uint16_t, u16Target);
14417 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14418 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
14419 IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
14420 IEM_MC_SET_RIP_U16(u16Target);
14421 IEM_MC_END()
14422 return VINF_SUCCESS;
14423
14424 case IEMMODE_32BIT:
14425 IEM_MC_BEGIN(0, 2);
14426 IEM_MC_LOCAL(uint32_t, u32Target);
14427 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14428 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
14429 IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
14430 IEM_MC_SET_RIP_U32(u32Target);
14431 IEM_MC_END()
14432 return VINF_SUCCESS;
14433
14434 case IEMMODE_64BIT:
14435 IEM_MC_BEGIN(0, 2);
14436 IEM_MC_LOCAL(uint32_t, u32Target);
14437 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14438 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
14439 IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
14440 IEM_MC_SET_RIP_U32(u32Target);
14441 IEM_MC_END()
14442 return VINF_SUCCESS;
14443
14444 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14445 }
14446 }
14447}
14448
14449
14450/**
14451 * Opcode 0xff /5.
14452 * @param bRm The RM byte.
14453 */
14454FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
14455{
14456 IEMOP_MNEMONIC("jmp Ep");
14457 IEMOP_HLP_NO_64BIT();
14458 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
14459}
14460
14461
/**
 * Opcode 0xff /6.
 *
 * PUSH Ev - push a register or memory operand onto the stack.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("push Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here: fetch the operand, then push it. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
            IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
            IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
            IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14515
14516
/** Opcode 0xff. */
FNIEMOP_DEF(iemOp_Grp5)
{
    /* Group 5: dispatch on the reg field of the ModR/M byte; /7 is an
       undefined encoding. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC("inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC("dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC("grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    /* The 3-bit reg field is fully covered above; getting here is a bug. */
    AssertFailedReturn(VERR_INTERNAL_ERROR_2);
}
14545
14546
14547
/**
 * The one byte opcode decoder function table, indexed by the opcode byte.
 * Forward declared at the top of this file so the prefix decoders can
 * re-dispatch through it.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa,             iemOp_bound_Gv_Ma,      iemOp_arpl_Ew_Gw,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_pop_Ev,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_Al_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp,        iemOp_lds_Gv_Mp,        iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int_3,            iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_Invalid,          iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_eAX_DX,           iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_Invalid,          iemOp_repne,            iemOp_repe, /** @todo 0xf1 is INT1 / ICEBP. */
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
14615
14616
14617/** @} */
14618
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette