VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 99748

Last change on this file since 99748 was 99685, checked in by vboxsync, 20 months ago

VMM/IEM: Baked the iemOpHlpCalcRmEffAddrEx functionality into the regular function by extending the cbImm parameter. bugref:10369

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 439.1 KB
Line 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 99685 2023-05-08 21:59:40Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
59/**
60 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
61 * memory/register as the destination.
62 *
63 * Used with IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED.
 *
 * @param   a_fnNormalU8    Assembly worker for the plain (non-LOCK) u8 operation.
 * @param   a_fRW           IEM_ACCESS_* flags used when mapping and committing
 *                          the memory destination operand.
 *
 * Note!   This macro deliberately leaves two scopes open (the memory 'else'
 *         branch and the LOCK-prefix 'else' branch); the companion _NO_LOCK
 *         or _LOCKED tail macro closes them, so the two must always be used
 *         as a pair in the same function body.
64 */
65#define IEMOP_BODY_BINARY_rm_r8(a_fnNormalU8, a_fRW) \
66 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
67 \
68 /* \
69 * If rm is denoting a register, no more instruction bytes. \
70 */ \
71 if (IEM_IS_MODRM_REG_MODE(bRm)) \
72 { \
73 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
74 \
75 IEM_MC_BEGIN(3, 0); \
76 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
77 IEM_MC_ARG(uint8_t, u8Src, 1); \
78 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
79 \
80 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
81 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
82 IEM_MC_REF_EFLAGS(pEFlags); \
83 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
84 \
85 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
86 IEM_MC_END(); \
87 } \
88 else \
89 { \
90 /* \
91 * We're accessing memory. \
92 * Note! We're putting the eflags on the stack here so we can commit them \
93 * after the memory. \
94 */ \
95 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
96 { \
97 IEM_MC_BEGIN(3, 2); \
98 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
99 IEM_MC_ARG(uint8_t, u8Src, 1); \
100 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
101 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
102 \
103 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
104 IEMOP_HLP_DONE_DECODING(); \
105 IEM_MC_MEM_MAP(pu8Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
106 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
107 IEM_MC_FETCH_EFLAGS(EFlags); \
108 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
109 \
110 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, a_fRW); \
111 IEM_MC_COMMIT_EFLAGS(EFlags); \
112 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
113 IEM_MC_END(); \
114 } \
115 else \
116 { \
117 (void)0
118
/**
 * Tail for IEMOP_BODY_BINARY_rm_r8 when the instruction does not allow the
 * LOCK prefix: raises an invalid-lock-prefix exception and closes the scopes
 * the body macro left open.
 */
119#define IEMOP_BODY_BINARY_rm_r8_NO_LOCK() \
120 IEMOP_HLP_DONE_DECODING(); \
121 return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
122 } \
123 } \
124 (void)0
125
/**
 * Tail for IEMOP_BODY_BINARY_rm_r8 implementing the LOCK-prefixed
 * read-modify-write memory path via @a a_fnLockedU8; closes the scopes the
 * body macro left open.
 */
126#define IEMOP_BODY_BINARY_rm_r8_LOCKED(a_fnLockedU8) \
127 IEM_MC_BEGIN(3, 2); \
128 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
129 IEM_MC_ARG(uint8_t, u8Src, 1); \
130 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
131 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
132 \
133 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
134 IEMOP_HLP_DONE_DECODING(); \
135 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
136 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
137 IEM_MC_FETCH_EFLAGS(EFlags); \
138 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
139 \
140 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
141 IEM_MC_COMMIT_EFLAGS(EFlags); \
142 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
143 IEM_MC_END(); \
144 } \
145 } \
146 (void)0
147
148/**
149 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
150 * destination.
 *
 * @param   a_fnNormalU8    Assembly worker for the u8 operation.
 *
 * Self-contained (no tail macro needed): the destination is a register, so a
 * LOCK prefix is rejected in both paths via
 * IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX().
151 */
152#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
153 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
154 \
155 /* \
156 * If rm is denoting a register, no more instruction bytes. \
157 */ \
158 if (IEM_IS_MODRM_REG_MODE(bRm)) \
159 { \
160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
161 IEM_MC_BEGIN(3, 0); \
162 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
163 IEM_MC_ARG(uint8_t, u8Src, 1); \
164 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
165 \
166 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
167 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
168 IEM_MC_REF_EFLAGS(pEFlags); \
169 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
170 \
171 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
172 IEM_MC_END(); \
173 } \
174 else \
175 { \
176 /* \
177 * We're accessing memory. \
178 */ \
179 IEM_MC_BEGIN(3, 1); \
180 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
181 IEM_MC_ARG(uint8_t, u8Src, 1); \
182 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
183 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
184 \
185 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
187 IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
188 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
189 IEM_MC_REF_EFLAGS(pEFlags); \
190 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
191 \
192 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
193 IEM_MC_END(); \
194 } \
195 (void)0
196
197
198/**
199 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
200 * memory/register as the destination.
 *
 * @param   a_fnNormalU16   Assembly worker for the non-LOCK u16 operation.
 * @param   a_fnNormalU32   Assembly worker for the non-LOCK u32 operation.
 * @param   a_fnNormalU64   Assembly worker for the non-LOCK u64 operation.
 * @param   a_fRW           IEM_ACCESS_* flags for the memory destination;
 *                          also used to skip clearing the high dword for
 *                          instructions that do not write the destination
 *                          (TEST, CMP) in the 32-bit register case.
 *
 * Note!   Leaves two scopes open; must be paired with
 *         IEMOP_BODY_BINARY_rm_rv_NO_LOCK or IEMOP_BODY_BINARY_rm_rv_LOCKED.
201 */
202#define IEMOP_BODY_BINARY_rm_rv(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
203 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
204 \
205 /* \
206 * If rm is denoting a register, no more instruction bytes. \
207 */ \
208 if (IEM_IS_MODRM_REG_MODE(bRm)) \
209 { \
210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
211 switch (pVCpu->iem.s.enmEffOpSize) \
212 { \
213 case IEMMODE_16BIT: \
214 IEM_MC_BEGIN(3, 0); \
215 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
216 IEM_MC_ARG(uint16_t, u16Src, 1); \
217 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
218 \
219 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
220 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
221 IEM_MC_REF_EFLAGS(pEFlags); \
222 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
223 \
224 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
225 IEM_MC_END(); \
226 break; \
227 \
228 case IEMMODE_32BIT: \
229 IEM_MC_BEGIN(3, 0); \
230 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
231 IEM_MC_ARG(uint32_t, u32Src, 1); \
232 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
233 \
234 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
235 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
236 IEM_MC_REF_EFLAGS(pEFlags); \
237 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
238 \
239 if ((a_fRW) == IEM_ACCESS_DATA_RW) /* not TEST and CMP */ \
240 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
241 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
242 IEM_MC_END(); \
243 break; \
244 \
245 case IEMMODE_64BIT: \
246 IEM_MC_BEGIN(3, 0); \
247 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
248 IEM_MC_ARG(uint64_t, u64Src, 1); \
249 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
250 \
251 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
252 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
253 IEM_MC_REF_EFLAGS(pEFlags); \
254 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
255 \
256 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
257 IEM_MC_END(); \
258 break; \
259 \
260 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
261 } \
262 } \
263 else \
264 { \
265 /* \
266 * We're accessing memory. \
267 * Note! We're putting the eflags on the stack here so we can commit them \
268 * after the memory. \
269 */ \
270 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
271 { \
272 switch (pVCpu->iem.s.enmEffOpSize) \
273 { \
274 case IEMMODE_16BIT: \
275 IEM_MC_BEGIN(3, 2); \
276 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
277 IEM_MC_ARG(uint16_t, u16Src, 1); \
278 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
279 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
280 \
281 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
282 IEMOP_HLP_DONE_DECODING(); \
283 IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
284 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
285 IEM_MC_FETCH_EFLAGS(EFlags); \
286 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
287 \
288 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
289 IEM_MC_COMMIT_EFLAGS(EFlags); \
290 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
291 IEM_MC_END(); \
292 break; \
293 \
294 case IEMMODE_32BIT: \
295 IEM_MC_BEGIN(3, 2); \
296 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
297 IEM_MC_ARG(uint32_t, u32Src, 1); \
298 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
299 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
300 \
301 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
302 IEMOP_HLP_DONE_DECODING(); \
303 IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
304 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
305 IEM_MC_FETCH_EFLAGS(EFlags); \
306 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
307 \
308 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
309 IEM_MC_COMMIT_EFLAGS(EFlags); \
310 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
311 IEM_MC_END(); \
312 break; \
313 \
314 case IEMMODE_64BIT: \
315 IEM_MC_BEGIN(3, 2); \
316 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
317 IEM_MC_ARG(uint64_t, u64Src, 1); \
318 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
319 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
320 \
321 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
322 IEMOP_HLP_DONE_DECODING(); \
323 IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
324 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
325 IEM_MC_FETCH_EFLAGS(EFlags); \
326 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
327 \
328 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
329 IEM_MC_COMMIT_EFLAGS(EFlags); \
330 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
331 IEM_MC_END(); \
332 break; \
333 \
334 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
335 } \
336 } \
337 else \
338 { \
339 (void)0
340
/**
 * Tail for IEMOP_BODY_BINARY_rm_rv when the instruction does not allow the
 * LOCK prefix: raises an invalid-lock-prefix exception and closes the scopes
 * the body macro left open.
 */
341#define IEMOP_BODY_BINARY_rm_rv_NO_LOCK() \
342 IEMOP_HLP_DONE_DECODING(); \
343 return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
344 } \
345 } \
346 (void)0
347
/**
 * Tail for IEMOP_BODY_BINARY_rm_rv implementing the LOCK-prefixed
 * read-modify-write memory path for all three operand sizes; closes the
 * scopes the body macro left open.
 */
348#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
349 switch (pVCpu->iem.s.enmEffOpSize) \
350 { \
351 case IEMMODE_16BIT: \
352 IEM_MC_BEGIN(3, 2); \
353 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
354 IEM_MC_ARG(uint16_t, u16Src, 1); \
355 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
356 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
357 \
358 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
359 IEMOP_HLP_DONE_DECODING(); \
360 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
361 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
362 IEM_MC_FETCH_EFLAGS(EFlags); \
363 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
364 \
365 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
366 IEM_MC_COMMIT_EFLAGS(EFlags); \
367 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
368 IEM_MC_END(); \
369 break; \
370 \
371 case IEMMODE_32BIT: \
372 IEM_MC_BEGIN(3, 2); \
373 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
374 IEM_MC_ARG(uint32_t, u32Src, 1); \
375 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
376 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
377 \
378 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
379 IEMOP_HLP_DONE_DECODING(); \
380 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
381 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
382 IEM_MC_FETCH_EFLAGS(EFlags); \
383 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
384 \
385 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
386 IEM_MC_COMMIT_EFLAGS(EFlags); \
387 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
388 IEM_MC_END(); \
389 break; \
390 \
391 case IEMMODE_64BIT: \
392 IEM_MC_BEGIN(3, 2); \
393 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
394 IEM_MC_ARG(uint64_t, u64Src, 1); \
395 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
396 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
397 \
398 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
399 IEMOP_HLP_DONE_DECODING(); \
400 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
401 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
402 IEM_MC_FETCH_EFLAGS(EFlags); \
403 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
404 \
405 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
406 IEM_MC_COMMIT_EFLAGS(EFlags); \
407 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
408 IEM_MC_END(); \
409 break; \
410 \
411 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
412 } \
413 } \
414 } \
415 (void)0
416
417
418/**
419 * Body for instructions like ADD, AND, OR, ++ with working on AL with
420 * a byte immediate.
 *
 * @param   a_fnNormalU8    Assembly worker for the u8 operation (AL, imm8).
 *
 * Note!   Deliberately ends without a semicolon after IEM_MC_END() so the
 *         invoking statement supplies it.
421 */
422#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
423 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
425 \
426 IEM_MC_BEGIN(3, 0); \
427 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
428 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1); \
429 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
430 \
431 IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
432 IEM_MC_REF_EFLAGS(pEFlags); \
433 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
434 \
435 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
436 IEM_MC_END()
437
438/**
439 * Body for instructions like ADD, AND, OR, ++ with working on
440 * AX/EAX/RAX with a word/dword immediate.
 *
 * @param   a_fnNormalU16       Assembly worker for the u16 operation.
 * @param   a_fnNormalU32       Assembly worker for the u32 operation.
 * @param   a_fnNormalU64       Assembly worker for the u64 operation (the
 *                              immediate is a sign-extended imm32, see
 *                              IEM_OPCODE_GET_NEXT_S32_SX_U64).
 * @param   a_fModifiesDstReg   Non-zero if the instruction writes the
 *                              destination register, in which case the 32-bit
 *                              case clears the high dword of RAX.
 *
 * NOTE(review): the cases have no break; statements, which appears to rely on
 * IEM_MC_ADVANCE_RIP_AND_FINISH()/IEM_MC_END() terminating the case -- confirm
 * against the IEM_MC_* definitions before touching this.
441 */
442#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
443 switch (pVCpu->iem.s.enmEffOpSize) \
444 { \
445 case IEMMODE_16BIT: \
446 { \
447 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
449 \
450 IEM_MC_BEGIN(3, 0); \
451 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
452 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1); \
453 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
454 \
455 IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
456 IEM_MC_REF_EFLAGS(pEFlags); \
457 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
458 \
459 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
460 IEM_MC_END(); \
461 } \
462 \
463 case IEMMODE_32BIT: \
464 { \
465 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
466 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
467 \
468 IEM_MC_BEGIN(3, 0); \
469 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
470 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1); \
471 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
472 \
473 IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
474 IEM_MC_REF_EFLAGS(pEFlags); \
475 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
476 \
477 if (a_fModifiesDstReg) \
478 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
479 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
480 IEM_MC_END(); \
481 } \
482 \
483 case IEMMODE_64BIT: \
484 { \
485 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
487 \
488 IEM_MC_BEGIN(3, 0); \
489 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
490 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1); \
491 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
492 \
493 IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
494 IEM_MC_REF_EFLAGS(pEFlags); \
495 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
496 \
497 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
498 IEM_MC_END(); \
499 } \
500 \
501 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
502 } \
503 (void)0
504
505
506
507/* Instruction specification format - work in progress: */
508
509/**
510 * @opcode 0x00
511 * @opmnemonic add
512 * @op1 rm:Eb
513 * @op2 reg:Gb
514 * @opmaps one
515 * @openc ModR/M
516 * @opflmodify cf,pf,af,zf,sf,of
517 * @ophints harmless ignores_op_sizes
518 * @opstats add_Eb_Gb
519 * @opgroup og_gen_arith_bin
520 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
521 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
522 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
523 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
524 */
525FNIEMOP_DEF(iemOp_add_Eb_Gb)
526{
    /* ADD Eb,Gb: body macro covers register/unlocked-memory forms; the
       _LOCKED tail supplies the LOCK-prefixed memory form and closes scopes. */
527 IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
528 IEMOP_BODY_BINARY_rm_r8( iemAImpl_add_u8, IEM_ACCESS_DATA_RW);
529 IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_add_u8_locked);
530}
531
532
533/**
534 * @opcode 0x01
535 * @opgroup og_gen_arith_bin
536 * @opflmodify cf,pf,af,zf,sf,of
537 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
538 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
539 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
540 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
541 */
542FNIEMOP_DEF(iemOp_add_Ev_Gv)
543{
    /* ADD Ev,Gv: body macro + LOCKED tail pair (16/32/64-bit workers). */
544 IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
545 IEMOP_BODY_BINARY_rm_rv( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, IEM_ACCESS_DATA_RW);
546 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
547}
548
549
550/**
551 * @opcode 0x02
552 * @opgroup og_gen_arith_bin
553 * @opflmodify cf,pf,af,zf,sf,of
554 * @opcopytests iemOp_add_Eb_Gb
555 */
556FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* ADD Gb,Eb: register destination, so no LOCK variant is needed. */
557 IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
558 IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
559}
561
562
563/**
564 * @opcode 0x03
565 * @opgroup og_gen_arith_bin
566 * @opflmodify cf,pf,af,zf,sf,of
567 * @opcopytests iemOp_add_Ev_Gv
568 */
569FNIEMOP_DEF(iemOp_add_Gv_Ev)
570{
    /* ADD Gv,Ev: register destination; final '1' = modifies the destination
       register (clears high dword in 32-bit mode). */
571 IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
572 IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
573}
574
575
576/**
577 * @opcode 0x04
578 * @opgroup og_gen_arith_bin
579 * @opflmodify cf,pf,af,zf,sf,of
580 * @opcopytests iemOp_add_Eb_Gb
581 */
582FNIEMOP_DEF(iemOp_add_Al_Ib)
583{
    /* ADD AL,Ib: fixed encoding, AL += imm8. */
584 IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
585 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
586}
587
588
589/**
590 * @opcode 0x05
591 * @opgroup og_gen_arith_bin
592 * @opflmodify cf,pf,af,zf,sf,of
593 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
594 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
595 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
596 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
597 */
598FNIEMOP_DEF(iemOp_add_eAX_Iz)
599{
    /* ADD rAX,Iz: final '1' = modifies rAX (high dword cleared in 32-bit mode). */
600 IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
601 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
602}
603
604
605/**
606 * @opcode 0x06
607 * @opgroup og_stack_sreg
608 */
609FNIEMOP_DEF(iemOp_push_ES)
610{
    /* PUSH ES: invalid in 64-bit mode; defers to the common sreg push helper. */
611 IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
612 IEMOP_HLP_NO_64BIT();
613 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
614}
615
616
617/**
618 * @opcode 0x07
619 * @opgroup og_stack_sreg
620 */
621FNIEMOP_DEF(iemOp_pop_ES)
622{
    /* POP ES: invalid in 64-bit mode; deferred to the pop-sreg C implementation. */
623 IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
624 IEMOP_HLP_NO_64BIT();
625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
626 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
627}
628
629
630/**
631 * @opcode 0x08
632 * @opgroup og_gen_arith_bin
633 * @opflmodify cf,pf,af,zf,sf,of
634 * @opflundef af
635 * @opflclear of,cf
636 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
637 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
638 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
639 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
640 */
641FNIEMOP_DEF(iemOp_or_Eb_Gb)
642{
    /* OR Eb,Gb: AF is architecturally undefined after OR, hence the marker. */
643 IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
644 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
645 IEMOP_BODY_BINARY_rm_r8( iemAImpl_or_u8, IEM_ACCESS_DATA_RW);
646 IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_or_u8_locked);
647}
648
649
650/**
651 * @opcode 0x09
652 * @opgroup og_gen_arith_bin
653 * @opflmodify cf,pf,af,zf,sf,of
654 * @opflundef af
655 * @opflclear of,cf
656 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
657 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
658 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
659 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
660 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
661 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
662 */
663FNIEMOP_DEF(iemOp_or_Ev_Gv)
664{
    /* OR Ev,Gv: body macro + LOCKED tail pair; AF undefined after OR. */
665 IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
666 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
667 IEMOP_BODY_BINARY_rm_rv( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, IEM_ACCESS_DATA_RW);
668 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
669}
670
671
672/**
673 * @opcode 0x0a
674 * @opgroup og_gen_arith_bin
675 * @opflmodify cf,pf,af,zf,sf,of
676 * @opflundef af
677 * @opflclear of,cf
678 * @opcopytests iemOp_or_Eb_Gb
679 */
680FNIEMOP_DEF(iemOp_or_Gb_Eb)
681{
    /* OR Gb,Eb: register destination; AF undefined after OR. */
682 IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
683 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
684 IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
685}
686
687
688/**
689 * @opcode 0x0b
690 * @opgroup og_gen_arith_bin
691 * @opflmodify cf,pf,af,zf,sf,of
692 * @opflundef af
693 * @opflclear of,cf
694 * @opcopytests iemOp_or_Ev_Gv
695 */
696FNIEMOP_DEF(iemOp_or_Gv_Ev)
697{
    /* OR Gv,Ev: register destination; final '1' = modifies destination reg. */
698 IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
699 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
700 IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
701}
702
703
704/**
705 * @opcode 0x0c
706 * @opgroup og_gen_arith_bin
707 * @opflmodify cf,pf,af,zf,sf,of
708 * @opflundef af
709 * @opflclear of,cf
710 * @opcopytests iemOp_or_Eb_Gb
711 */
712FNIEMOP_DEF(iemOp_or_Al_Ib)
713{
    /* OR AL,Ib: fixed encoding; AF undefined after OR. */
714 IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
715 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
716 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
717}
718
719
720/**
721 * @opcode 0x0d
722 * @opgroup og_gen_arith_bin
723 * @opflmodify cf,pf,af,zf,sf,of
724 * @opflundef af
725 * @opflclear of,cf
726 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
727 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
728 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
729 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
730 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
731 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
732 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
733 */
734FNIEMOP_DEF(iemOp_or_eAX_Iz)
735{
    /* OR rAX,Iz: final '1' = modifies rAX; AF undefined after OR. */
736 IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
737 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
738 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
739}
740
741
742/**
743 * @opcode 0x0e
744 * @opgroup og_stack_sreg
745 */
746FNIEMOP_DEF(iemOp_push_CS)
747{
    /* PUSH CS: invalid in 64-bit mode; defers to the common sreg push helper. */
748 IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
749 IEMOP_HLP_NO_64BIT();
750 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
751}
752
753
754/**
755 * @opcode 0x0f
756 * @opmnemonic EscTwo0f
757 * @openc two0f
758 * @opdisenum OP_2B_ESC
759 * @ophints harmless
760 * @opgroup og_escapes
761 */
762FNIEMOP_DEF(iemOp_2byteEscape)
763{
764#ifdef VBOX_STRICT
765 /* Sanity check the table the first time around. */
766 static bool s_fTested = false;
767 if (RT_LIKELY(s_fTested)) { /* likely */ }
768 else
769 {
770 s_fTested = true;
771 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
772 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
773 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
774 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
775 }
776#endif
777
 /* 286 and later: 0x0f escapes into the two-byte opcode map. */
778 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
779 {
780 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
781 IEMOP_HLP_MIN_286();
782 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
783 }
784 /* @opdone */
785
786 /*
787 * On the 8086 this is a POP CS instruction.
788 * For the time being we don't specify this.
789 */
790 IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
791 IEMOP_HLP_NO_64BIT();
792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* Fix: this emulates the 8086 POP CS, so pop into CS rather than ES; the
    previous X86_SREG_ES looked like a copy/paste from iemOp_pop_ES. */
793 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
794}
795
796/**
797 * @opcode 0x10
798 * @opgroup og_gen_arith_bin
799 * @opfltest cf
800 * @opflmodify cf,pf,af,zf,sf,of
801 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
802 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
803 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
804 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
805 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
806 */
807FNIEMOP_DEF(iemOp_adc_Eb_Gb)
808{
    /* ADC Eb,Gb: body macro + LOCKED tail pair (carry-in handled by worker). */
809 IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
810 IEMOP_BODY_BINARY_rm_r8( iemAImpl_adc_u8, IEM_ACCESS_DATA_RW);
811 IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_adc_u8_locked);
812}
813
814
815/**
816 * @opcode 0x11
817 * @opgroup og_gen_arith_bin
818 * @opfltest cf
819 * @opflmodify cf,pf,af,zf,sf,of
820 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
821 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
822 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
823 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
824 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
825 */
826FNIEMOP_DEF(iemOp_adc_Ev_Gv)
827{
    /* ADC Ev,Gv: body macro + LOCKED tail pair (16/32/64-bit workers). */
828 IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
829 IEMOP_BODY_BINARY_rm_rv( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, IEM_ACCESS_DATA_RW);
830 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
831}
832
833
834/**
835 * @opcode 0x12
836 * @opgroup og_gen_arith_bin
837 * @opfltest cf
838 * @opflmodify cf,pf,af,zf,sf,of
839 * @opcopytests iemOp_adc_Eb_Gb
840 */
841FNIEMOP_DEF(iemOp_adc_Gb_Eb)
842{
    /* ADC Gb,Eb: register destination, no LOCK variant needed. */
843 IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
844 IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
845}
846
847
848/**
849 * @opcode 0x13
850 * @opgroup og_gen_arith_bin
851 * @opfltest cf
852 * @opflmodify cf,pf,af,zf,sf,of
853 * @opcopytests iemOp_adc_Ev_Gv
854 */
855FNIEMOP_DEF(iemOp_adc_Gv_Ev)
856{
    /* ADC Gv,Ev: register destination; final '1' = modifies destination reg. */
857 IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
858 IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
859}
860
861
862/**
863 * @opcode 0x14
864 * @opgroup og_gen_arith_bin
865 * @opfltest cf
866 * @opflmodify cf,pf,af,zf,sf,of
867 * @opcopytests iemOp_adc_Eb_Gb
868 */
869FNIEMOP_DEF(iemOp_adc_Al_Ib)
870{
    /* ADC AL,Ib: fixed encoding, AL += imm8 + CF. */
871 IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
872 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
873}
874
875
876/**
877 * @opcode 0x15
878 * @opgroup og_gen_arith_bin
879 * @opfltest cf
880 * @opflmodify cf,pf,af,zf,sf,of
881 * @opcopytests iemOp_adc_Ev_Gv
882 */
883FNIEMOP_DEF(iemOp_adc_eAX_Iz)
884{
    /* ADC rAX,Iz: final '1' = modifies rAX. */
885 IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
886 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
887}
888
889
890/**
891 * @opcode 0x16
892 */
893FNIEMOP_DEF(iemOp_push_SS)
894{
    /* PUSH SS: invalid in 64-bit mode; defers to the common sreg push helper. */
895 IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
896 IEMOP_HLP_NO_64BIT();
897 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
898}
899
900
901/**
902 * @opcode 0x17
903 * @opgroup og_gen_arith_bin
904 * @opfltest cf
905 * @opflmodify cf,pf,af,zf,sf,of
906 */
907FNIEMOP_DEF(iemOp_pop_SS)
908{
    /* POP SS: invalid in 64-bit mode; deferred to the pop-sreg C implementation
       (note the DISOPTYPE_INHIBIT_IRQS marker in the mnemonic flags). */
909 IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
911 IEMOP_HLP_NO_64BIT();
912 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
913}
914
915
916/**
917 * @opcode 0x18
918 * @opgroup og_gen_arith_bin
919 * @opfltest cf
920 * @opflmodify cf,pf,af,zf,sf,of
921 */
922FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
923{
    /* SBB Eb,Gb: body macro + LOCKED tail pair (borrow handled by worker). */
924 IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
925 IEMOP_BODY_BINARY_rm_r8( iemAImpl_sbb_u8, IEM_ACCESS_DATA_RW);
926 IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sbb_u8_locked);
927}
928
929
930/**
931 * @opcode 0x19
932 * @opgroup og_gen_arith_bin
933 * @opfltest cf
934 * @opflmodify cf,pf,af,zf,sf,of
935 */
936FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
937{
    /* SBB Ev,Gv: body macro + LOCKED tail pair (16/32/64-bit workers). */
938 IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
939 IEMOP_BODY_BINARY_rm_rv( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, IEM_ACCESS_DATA_RW);
940 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
941}
942
943
944/**
945 * @opcode 0x1a
946 * @opgroup og_gen_arith_bin
947 * @opfltest cf
948 * @opflmodify cf,pf,af,zf,sf,of
949 */
950FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
951{
    /* SBB Gb,Eb: register destination, no LOCK variant needed. */
952 IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
953 IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
954}
955
956
957/**
958 * @opcode 0x1b
959 * @opgroup og_gen_arith_bin
960 * @opfltest cf
961 * @opflmodify cf,pf,af,zf,sf,of
962 */
963FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
964{
    /* SBB Gv,Ev: register destination; final '1' = modifies destination reg. */
965 IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
966 IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
967}
968
969
970/**
971 * @opcode 0x1c
972 * @opgroup og_gen_arith_bin
973 * @opfltest cf
974 * @opflmodify cf,pf,af,zf,sf,of
975 */
976FNIEMOP_DEF(iemOp_sbb_Al_Ib)
977{
    /* SBB AL,Ib: fixed encoding, AL -= imm8 + CF. */
978 IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
979 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
980}
981
982
983/**
984 * @opcode 0x1d
985 * @opgroup og_gen_arith_bin
986 * @opfltest cf
987 * @opflmodify cf,pf,af,zf,sf,of
988 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* 0x1d: SBB rAX,Iz - subtract-with-borrow an operand-size immediate from AX/EAX/RAX. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
994
995
996/**
997 * @opcode 0x1e
998 * @opgroup og_stack_sreg
999 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* 0x1e: PUSH DS - invalid in 64-bit mode (DISOPTYPE_X86_INVALID_64). */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS); /* shared segment-register push helper */
}
1006
1007
1008/**
1009 * @opcode 0x1f
1010 * @opgroup og_stack_sreg
1011 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* 0x1f: POP DS - invalid in 64-bit mode; segment loads are done in the C implementation. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1019
1020
1021/**
1022 * @opcode 0x20
1023 * @opgroup og_gen_arith_bin
1024 * @opflmodify cf,pf,af,zf,sf,of
1025 * @opflundef af
1026 * @opflclear of,cf
1027 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* 0x20: AND Eb,Gb - byte bitwise AND into r/m; LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND (see @opflundef) */
    IEMOP_BODY_BINARY_rm_r8( iemAImpl_and_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_and_u8_locked);
}
1035
1036
1037/**
1038 * @opcode 0x21
1039 * @opgroup og_gen_arith_bin
1040 * @opflmodify cf,pf,af,zf,sf,of
1041 * @opflundef af
1042 * @opflclear of,cf
1043 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* 0x21: AND Ev,Gv - 16/32/64-bit bitwise AND into r/m; LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND */
    IEMOP_BODY_BINARY_rm_rv( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1051
1052
1053/**
1054 * @opcode 0x22
1055 * @opgroup og_gen_arith_bin
1056 * @opflmodify cf,pf,af,zf,sf,of
1057 * @opflundef af
1058 * @opflclear of,cf
1059 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* 0x22: AND Gb,Eb - byte bitwise AND into the register operand. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1066
1067
1068/**
1069 * @opcode 0x23
1070 * @opgroup og_gen_arith_bin
1071 * @opflmodify cf,pf,af,zf,sf,of
1072 * @opflundef af
1073 * @opflclear of,cf
1074 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* 0x23: AND Gv,Ev - 16/32/64-bit bitwise AND into the register operand. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1081
1082
1083/**
1084 * @opcode 0x24
1085 * @opgroup og_gen_arith_bin
1086 * @opflmodify cf,pf,af,zf,sf,of
1087 * @opflundef af
1088 * @opflclear of,cf
1089 */
1090FNIEMOP_DEF(iemOp_and_Al_Ib)
1091{
1092 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1093 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1094 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1095}
1096
1097
1098/**
1099 * @opcode 0x25
1100 * @opgroup og_gen_arith_bin
1101 * @opflmodify cf,pf,af,zf,sf,of
1102 * @opflundef af
1103 * @opflclear of,cf
1104 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* 0x25: AND rAX,Iz - bitwise AND an operand-size immediate into AX/EAX/RAX. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1111
1112
1113/**
1114 * @opcode 0x26
1115 * @opmnemonic SEG
1116 * @op1 ES
1117 * @opgroup og_prefix
1118 * @openc prefix
1119 * @opdisenum OP_SEG
1120 * @ophints harmless
1121 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* 0x26: ES segment-override prefix - record it, then restart decoding on the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es"); /* a REX prefix must come last; drop any earlier one */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;              /* effective segment used by subsequent memory accesses */

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);        /* dispatch the instruction that follows the prefix */
}
1131
1132
1133/**
1134 * @opcode 0x27
1135 * @opfltest af,cf
1136 * @opflmodify cf,pf,af,zf,sf,of
1137 * @opflundef of
1138 */
FNIEMOP_DEF(iemOp_daa)
{
    /* 0x27: DAA - decimal adjust AL after addition; invalid in 64-bit mode. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after DAA (see @opflundef) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);    /* the adjust logic lives in the C implementation */
}
1147
1148
1149/**
1150 * @opcode 0x28
1151 * @opgroup og_gen_arith_bin
1152 * @opflmodify cf,pf,af,zf,sf,of
1153 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* 0x28: SUB Eb,Gb - byte subtract into r/m; LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8( iemAImpl_sub_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sub_u8_locked);
}
1160
1161
1162/**
1163 * @opcode 0x29
1164 * @opgroup og_gen_arith_bin
1165 * @opflmodify cf,pf,af,zf,sf,of
1166 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* 0x29: SUB Ev,Gv - 16/32/64-bit subtract into r/m; LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1173
1174
1175/**
1176 * @opcode 0x2a
1177 * @opgroup og_gen_arith_bin
1178 * @opflmodify cf,pf,af,zf,sf,of
1179 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* 0x2a: SUB Gb,Eb - byte subtract into the register operand. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1185
1186
1187/**
1188 * @opcode 0x2b
1189 * @opgroup og_gen_arith_bin
1190 * @opflmodify cf,pf,af,zf,sf,of
1191 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* 0x2b: SUB Gv,Ev - 16/32/64-bit subtract into the register operand. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1197
1198
1199/**
1200 * @opcode 0x2c
1201 * @opgroup og_gen_arith_bin
1202 * @opflmodify cf,pf,af,zf,sf,of
1203 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* 0x2c: SUB AL,Ib - subtract an immediate byte from AL. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1209
1210
1211/**
1212 * @opcode 0x2d
1213 * @opgroup og_gen_arith_bin
1214 * @opflmodify cf,pf,af,zf,sf,of
1215 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* 0x2d: SUB rAX,Iz - subtract an operand-size immediate from AX/EAX/RAX. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1221
1222
1223/**
1224 * @opcode 0x2e
1225 * @opmnemonic SEG
1226 * @op1 CS
1227 * @opgroup og_prefix
1228 * @openc prefix
1229 * @opdisenum OP_SEG
1230 * @ophints harmless
1231 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* 0x2e: CS segment-override prefix - record it, then restart decoding on the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs"); /* a REX prefix must come last; drop any earlier one */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;              /* effective segment used by subsequent memory accesses */

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);        /* dispatch the instruction that follows the prefix */
}
1241
1242
1243/**
1244 * @opcode 0x2f
1245 * @opfltest af,cf
1246 * @opflmodify cf,pf,af,zf,sf,of
1247 * @opflundef of
1248 */
FNIEMOP_DEF(iemOp_das)
{
    /* 0x2f: DAS - decimal adjust AL after subtraction; invalid in 64-bit mode. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after DAS (see @opflundef) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);    /* the adjust logic lives in the C implementation */
}
1257
1258
1259/**
1260 * @opcode 0x30
1261 * @opgroup og_gen_arith_bin
1262 * @opflmodify cf,pf,af,zf,sf,of
1263 * @opflundef af
1264 * @opflclear of,cf
1265 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* 0x30: XOR Eb,Gb - byte bitwise XOR into r/m; LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR (see @opflundef) */
    IEMOP_BODY_BINARY_rm_r8( iemAImpl_xor_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_xor_u8_locked);
}
1273
1274
1275/**
1276 * @opcode 0x31
1277 * @opgroup og_gen_arith_bin
1278 * @opflmodify cf,pf,af,zf,sf,of
1279 * @opflundef af
1280 * @opflclear of,cf
1281 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* 0x31: XOR Ev,Gv - 16/32/64-bit bitwise XOR into r/m; LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR */
    IEMOP_BODY_BINARY_rm_rv( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1289
1290
1291/**
1292 * @opcode 0x32
1293 * @opgroup og_gen_arith_bin
1294 * @opflmodify cf,pf,af,zf,sf,of
1295 * @opflundef af
1296 * @opflclear of,cf
1297 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* 0x32: XOR Gb,Eb - byte bitwise XOR into the register operand. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1304
1305
1306/**
1307 * @opcode 0x33
1308 * @opgroup og_gen_arith_bin
1309 * @opflmodify cf,pf,af,zf,sf,of
1310 * @opflundef af
1311 * @opflclear of,cf
1312 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* 0x33: XOR Gv,Ev - 16/32/64-bit bitwise XOR into the register operand. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
}
1319
1320
1321/**
1322 * @opcode 0x34
1323 * @opgroup og_gen_arith_bin
1324 * @opflmodify cf,pf,af,zf,sf,of
1325 * @opflundef af
1326 * @opflclear of,cf
1327 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* 0x34: XOR AL,Ib - bitwise XOR an immediate byte into AL. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1334
1335
1336/**
1337 * @opcode 0x35
1338 * @opgroup og_gen_arith_bin
1339 * @opflmodify cf,pf,af,zf,sf,of
1340 * @opflundef af
1341 * @opflclear of,cf
1342 */
1343FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1344{
1345 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1346 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1347 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1348}
1349
1350
1351/**
1352 * @opcode 0x36
1353 * @opmnemonic SEG
1354 * @op1 SS
1355 * @opgroup og_prefix
1356 * @openc prefix
1357 * @opdisenum OP_SEG
1358 * @ophints harmless
1359 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* 0x36: SS segment-override prefix - record it, then restart decoding on the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss"); /* a REX prefix must come last; drop any earlier one */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;              /* effective segment used by subsequent memory accesses */

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);        /* dispatch the instruction that follows the prefix */
}
1369
1370
1371/**
1372 * @opcode 0x37
1373 * @opfltest af,cf
1374 * @opflmodify cf,pf,af,zf,sf,of
1375 * @opflundef pf,zf,sf,of
1376 * @opgroup og_gen_arith_dec
1377 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1378 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1379 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1380 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1381 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1382 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1383 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1384 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1385 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1386 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1387 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1388 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1389 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1390 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1391 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1392 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1393 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1394 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1395 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1396 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1397 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1398 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1399 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1400 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1401 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1402 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1403 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1404 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1405 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1406 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1407 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1408 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* 0x37: AAA - ASCII adjust AL after addition; invalid in 64-bit mode.
       See the @optest table above for the Intel/AMD flag and result differences. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after AAA */

    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aaa);    /* the adjust logic lives in the C implementation */
}
1418
1419
1420/**
1421 * @opcode 0x38
1422 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* 0x38: CMP Eb,Gb - byte compare (subtract, discard result).
       CMP only reads the destination, hence read-only access and no LOCK variant. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    IEMOP_BODY_BINARY_rm_r8(iemAImpl_cmp_u8, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
1429
1430
1431/**
1432 * @opcode 0x39
1433 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* 0x39: CMP Ev,Gv - 16/32/64-bit compare; read-only destination, no LOCK variant. */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    IEMOP_BODY_BINARY_rm_rv(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_rm_rv_NO_LOCK();
}
1440
1441
1442/**
1443 * @opcode 0x3a
1444 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* 0x3a: CMP Gb,Eb - byte compare, register destination form. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1450
1451
1452/**
1453 * @opcode 0x3b
1454 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* 0x3b: CMP Gv,Ev - 16/32/64-bit compare, register destination form.
       Last argument 0 = no register write-back (unlike sbb/and/sub which pass 1). */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1460
1461
1462/**
1463 * @opcode 0x3c
1464 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* 0x3c: CMP AL,Ib - compare AL with an immediate byte. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1470
1471
1472/**
1473 * @opcode 0x3d
1474 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* 0x3d: CMP rAX,Iz - compare the accumulator with an operand-size immediate.
       Last argument 0 = no register write-back. */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1480
1481
1482/**
1483 * @opcode 0x3e
1484 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* 0x3e: DS segment-override prefix - record it, then restart decoding on the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds"); /* a REX prefix must come last; drop any earlier one */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;              /* effective segment used by subsequent memory accesses */

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);        /* dispatch the instruction that follows the prefix */
}
1494
1495
1496/**
1497 * @opcode 0x3f
1498 * @opfltest af,cf
1499 * @opflmodify cf,pf,af,zf,sf,of
1500 * @opflundef pf,zf,sf,of
1501 * @opgroup og_gen_arith_dec
1502 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1503 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1504 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1505 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1506 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1507 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1508 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1509 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1510 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1511 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1512 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1513 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1514 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1515 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1516 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1517 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1518 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1519 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1520 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1521 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1522 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1523 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1524 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1525 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1526 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1527 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1528 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1529 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1530 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1531 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1532 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1533 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1534 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1535 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1536 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1537 */
1538FNIEMOP_DEF(iemOp_aas)
1539{
1540 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1541 IEMOP_HLP_NO_64BIT();
1542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1543 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1544
1545 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aas);
1546}
1547
1548
1549/**
1550 * Common 'inc/dec register' helper.
1551 *
1552 * Not for 64-bit code, only for what became the rex prefixes.
1553 */
#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(2, 0); \
            IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
            IEM_MC_ARG(uint32_t *, pEFlags, 1); \
            IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); /* operate on the register in place */ \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(2, 0); \
            IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
            IEM_MC_ARG(uint32_t *, pEFlags, 1); \
            IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes clear the high half of the 64-bit register */ \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        /* No 64-bit case: in 64-bit mode these opcodes are REX prefixes (see the doc comment above). */ \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
1583
1584/**
1585 * @opcode 0x40
1586 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);   /* restart decoding at the next opcode byte */
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 0x40 outside 64-bit mode: INC eAX. */
    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1604
1605
1606/**
1607 * @opcode 0x41
1608 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3;             /* REX.B: bit 3 of the B register field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);   /* restart decoding at the next opcode byte */
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 0x41 outside 64-bit mode: INC eCX. */
    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1627
1628
1629/**
1630 * @opcode 0x42
1631 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;         /* REX.X: bit 3 of the SIB index field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);   /* restart decoding at the next opcode byte */
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 0x42 outside 64-bit mode: INC eDX. */
    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1650
1651
1652
1653/**
1654 * @opcode 0x43
1655 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB = 1 << 3;             /* REX.B: bit 3 of the B register field */
        pVCpu->iem.s.uRexIndex = 1 << 3;         /* REX.X: bit 3 of the SIB index field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);   /* restart decoding at the next opcode byte */
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 0x43 outside 64-bit mode: INC eBX. */
    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1675
1676
1677/**
1678 * @opcode 0x44
1679 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3;           /* REX.R: bit 3 of the ModR/M reg field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);   /* restart decoding at the next opcode byte */
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 0x44 outside 64-bit mode: INC eSP. */
    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1698
1699
1700/**
1701 * @opcode 0x45
1702 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;           /* REX.R: bit 3 of the ModR/M reg field */
        pVCpu->iem.s.uRexB = 1 << 3;             /* REX.B: bit 3 of the B register field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);   /* restart decoding at the next opcode byte */
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 0x45 outside 64-bit mode: INC eBP. */
    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1722
1723
1724/**
1725 * @opcode 0x46
1726 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;           /* REX.R: bit 3 of the ModR/M reg field */
        pVCpu->iem.s.uRexIndex = 1 << 3;         /* REX.X: bit 3 of the SIB index field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);   /* restart decoding at the next opcode byte */
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 0x46 outside 64-bit mode: INC eSI. */
    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
1746
1747
1748/**
1749 * @opcode 0x47
1750 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;           /* REX.R: bit 3 of the ModR/M reg field */
        pVCpu->iem.s.uRexB = 1 << 3;             /* REX.B: bit 3 of the B register field */
        pVCpu->iem.s.uRexIndex = 1 << 3;         /* REX.X: bit 3 of the SIB index field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);   /* restart decoding at the next opcode byte */
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 0x47 outside 64-bit mode: INC eDI. */
    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
1771
1772
1773/**
1774 * @opcode 0x48
1775 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu);                /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);   /* restart decoding at the next opcode byte */
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 0x48 outside 64-bit mode: DEC eAX. */
    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
}
1794
1795
1796/**
1797 * @opcode 0x49
1798 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;             /* REX.B: bit 3 of the B register field */
        iemRecalEffOpSize(pVCpu);                /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);   /* restart decoding at the next opcode byte */
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 0x49 outside 64-bit mode: DEC eCX. */
    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
}
1818
1819
1820/**
1821 * @opcode 0x4a
1822 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;         /* REX.X: bit 3 of the SIB index field */
        iemRecalEffOpSize(pVCpu);                /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);   /* restart decoding at the next opcode byte */
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 0x4a outside 64-bit mode: DEC eDX. */
    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
}
1842
1843
1844/**
1845 * @opcode 0x4b
1846 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;             /* REX.B: bit 3 of the B register field */
        pVCpu->iem.s.uRexIndex = 1 << 3;         /* REX.X: bit 3 of the SIB index field */
        iemRecalEffOpSize(pVCpu);                /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);   /* restart decoding at the next opcode byte */
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 0x4b outside 64-bit mode: DEC eBX. */
    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
}
1867
1868
1869/**
1870 * @opcode 0x4c
1871 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;           /* REX.R: bit 3 of the ModR/M reg field */
        iemRecalEffOpSize(pVCpu);                /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);   /* restart decoding at the next opcode byte */
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 0x4c outside 64-bit mode: DEC eSP. */
    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
}
1891
1892
1893/**
1894 * @opcode 0x4d
1895 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;           /* REX.R: bit 3 of the ModR/M reg field */
        pVCpu->iem.s.uRexB = 1 << 3;             /* REX.B: bit 3 of the B register field */
        iemRecalEffOpSize(pVCpu);                /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);   /* restart decoding at the next opcode byte */
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 0x4d outside 64-bit mode: DEC eBP. */
    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
}
1916
1917
1918/**
1919 * @opcode 0x4e
1920 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;           /* REX.R: bit 3 of the ModR/M reg field */
        pVCpu->iem.s.uRexIndex = 1 << 3;         /* REX.X: bit 3 of the SIB index field */
        iemRecalEffOpSize(pVCpu);                /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);   /* restart decoding at the next opcode byte */
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 0x4e outside 64-bit mode: DEC eSI. */
    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
}
1941
1942
1943/**
1944 * @opcode 0x4f
1945 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;           /* REX.R: bit 3 of the ModR/M reg field */
        pVCpu->iem.s.uRexB = 1 << 3;             /* REX.B: bit 3 of the B register field */
        pVCpu->iem.s.uRexIndex = 1 << 3;         /* REX.X: bit 3 of the SIB index field */
        iemRecalEffOpSize(pVCpu);                /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);   /* restart decoding at the next opcode byte */
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 0x4f outside 64-bit mode: DEC eDI. */
    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
}
1967
1968
1969/**
1970 * Common 'push register' helper.
1971 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    /* Shared worker for the 0x50..0x57 PUSH reg opcodes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;              /* REX.B extends the opcode register field */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        /* In 64-bit mode PUSH defaults to 64-bit; the 66h prefix selects 16-bit (no 32-bit push). */
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2014
2015
2016/**
2017 * @opcode 0x50
2018 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    /* 0x50: PUSH rAX (rAX/r8 with REX.B). */
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
2024
2025
2026/**
2027 * @opcode 0x51
2028 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    /* 0x51: PUSH rCX (rCX/r9 with REX.B). */
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
2034
2035
2036/**
2037 * @opcode 0x52
2038 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    /* 0x52: PUSH rDX (rDX/r10 with REX.B). */
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
2044
2045
2046/**
2047 * @opcode 0x53
2048 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    /* 0x53: PUSH rBX (rBX/r11 with REX.B). */
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
2054
2055
/**
 * @opcode 0x54
 *
 * push rSP.  The 8086/8088 is special cased: it pushes the value SP has
 * *after* the decrement (hence the explicit subtract-2 below), whereas
 * later CPUs push the pre-decrement value via the common helper.
 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2); /* 8086 pushes the decremented SP value. */
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
        /* NOTE(review): the MC block above finishes the instruction, so the call
           below is presumably only reached for non-8086 targets - confirm against
           the IEM_MC_ADVANCE_RIP_AND_FINISH/IEM_MC_END expansion. */
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
2074
2075
/**
 * @opcode 0x55
 * push rBP - defers to the common push-register helper.
 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
2084
2085
/**
 * @opcode 0x56
 * push rSI - defers to the common push-register helper.
 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
2094
2095
/**
 * @opcode 0x57
 * push rDI - defers to the common push-register helper.
 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
2104
2105
/**
 * Common 'pop register' helper.
 *
 * Pops the top of the stack into the general-purpose register selected by
 * @a iReg, dispatching on the effective operand size.  In 64-bit mode the
 * register index is extended with REX.B and the default operand size is
 * forced to 64-bit (0x66 prefix selects 16-bit).
 *
 * @param   iReg    The register index (X86_GREG_XXX, without the REX.B bit).
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB; /* REX.B extends the register index to r8..r15. */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            /* 32-bit writes zero the upper half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2152
2153
/**
 * @opcode 0x58
 * pop rAX - defers to the common pop-register helper.
 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
2162
2163
/**
 * @opcode 0x59
 * pop rCX - defers to the common pop-register helper.
 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
2172
2173
/**
 * @opcode 0x5a
 * pop rDX - defers to the common pop-register helper.
 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
2182
2183
/**
 * @opcode 0x5b
 * pop rBX - defers to the common pop-register helper.
 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
2192
2193
/**
 * @opcode 0x5c
 *
 * pop rSP.  Needs special handling because the stack pointer being popped
 * into is also the one being popped from: the popped value is read first
 * and then stored over whatever SP the pop itself left behind.  With REX.B
 * in 64-bit mode this is really 'pop r12' and the common helper applies.
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP); /* REX.B: actually pop r12. */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2243
2244
/**
 * @opcode 0x5d
 * pop rBP - defers to the common pop-register helper.
 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
2253
2254
/**
 * @opcode 0x5e
 * pop rSI - defers to the common pop-register helper.
 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
2263
2264
/**
 * @opcode 0x5f
 * pop rDI - defers to the common pop-register helper.
 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
2273
2274
/**
 * @opcode 0x60
 *
 * pusha/pushad - pushes all eight general registers.  186+ only and invalid
 * in 64-bit mode; the heavy lifting is deferred to a C implementation picked
 * by operand size.
 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
2288
2289
/**
 * @opcode 0x61
 *
 * popa/popad outside 64-bit mode, deferred to a C implementation by operand
 * size.  In 64-bit mode the 0x61 byte is the (KNC) MVEX prefix, which is not
 * supported here and raises \#UD.
 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
2309
2310
2311/**
2312 * @opcode 0x62
2313 * @opmnemonic bound
2314 * @op1 Gv_RO
2315 * @op2 Ma
2316 * @opmincpu 80186
2317 * @ophints harmless x86_invalid_64
2318 * @optest op1=0 op2=0 ->
2319 * @optest op1=1 op2=0 -> value.xcpt=5
2320 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2321 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2322 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2323 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2324 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2325 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2326 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2327 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2328 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2329 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2330 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2331 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2332 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2333 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2334 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2335 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2336 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2337 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2338 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2339 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2340 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2341 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2342 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2343 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2344 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2345 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2346 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2347 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2348 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2349 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2350 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2351 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2352 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2353 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2354 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2355 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2356 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2357 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2358 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2359 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2360 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2361 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2362 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restrict wrt addressable registers. */
    uint8_t bRm;
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /* BOUND proper: fetch the index register and the two bounds from
               the memory operand, then let the C implementation do the range
               check and raise \#BR as needed. */
            /** @todo testcase: check that there are two memory accesses involved. Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                /* Upper bound lives 2 bytes after the lower bound. */
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                /* Upper bound lives 4 bytes after the lower bound. */
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        /* MOD=3 in legacy/compat mode: this is the EVEX prefix, provided the
           guest CPU advertises AVX-512 Foundation; otherwise \#UD. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
    {
        /* 64-bit mode: 0x62 is always the EVEX prefix (BOUND is invalid). */
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX decoding: consume the remaining two payload bytes, then bail as
       not implemented. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2450
2451
/** Opcode 0x63 - non-64-bit modes.
 *
 * arpl Ew,Gw: adjusts the RPL field of the destination selector and updates
 * ZF via the iemAImpl_arpl assembly helper.  286+ protected mode only (the
 * helper macros reject real and V86 mode). */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory: map the destination word read/write so the helper can
           update it in place, then commit both the word and EFLAGS. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2499
2500
/**
 * @opcode 0x63
 *
 * movsxd Gv,Ev (64-bit mode): sign-extends a 32-bit source into a 64-bit
 * destination register.  Only the REX.W form is implemented; without REX.W
 * this hits an AssertFailedReturn (see the note below).
 *
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        /* Non-REX.W form (plain 32/16-bit move per AMD docs) not implemented. */
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
2548
2549
/**
 * @opcode 0x64
 * @opmnemonic segfs
 * @opmincpu 80386
 * @opgroup og_prefixes
 *
 * FS segment-override prefix: records the prefix, sets FS as the effective
 * segment and recursively decodes the next opcode byte.
 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2567
2568
/**
 * @opcode 0x65
 * @opmnemonic seggs
 * @opmincpu 80386
 * @opgroup og_prefixes
 *
 * GS segment-override prefix: records the prefix, sets GS as the effective
 * segment and recursively decodes the next opcode byte.
 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2586
2587
/**
 * @opcode 0x66
 * @opmnemonic opsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 *
 * Operand-size override prefix: records the prefix, recalculates the
 * effective operand size and recursively decodes the next opcode byte.
 */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2612
2613
/**
 * @opcode 0x67
 * @opmnemonic addrsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 *
 * Address-size override prefix: records the prefix, toggles the effective
 * address mode relative to the default (16<->32, or 64->32 in long mode)
 * and recursively decodes the next opcode byte.
 */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2639
2640
/**
 * @opcode 0x68
 *
 * push Iz - pushes an immediate of the effective operand size.  In 64-bit
 * mode the immediate is 32 bits sign-extended to 64 (no 64-bit immediate
 * form exists).
 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        case IEMMODE_64BIT:
        {
            /* 32-bit immediate, sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2687
2688
/**
 * @opcode 0x69
 *
 * imul Gv,Ev,Iz - three-operand signed multiply with a full-size immediate.
 * The result goes via a local temporary so the destination register is only
 * written after the assembly helper has run.  For memory operands the
 * immediate size is handed to the effective-address calculation (third
 * argument of IEM_MC_CALC_RM_EFF_ADDR) so the decoder knows how many
 * immediate bytes follow the ModR/M encoding.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            /* Helper variant is selected by the guest CPU's EFLAGS behavior. */
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 immediate bytes follow. */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 immediate bytes follow. */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* 32-bit imm, sign-extended. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 immediate bytes follow. */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
                IEM_MC_ASSIGN_U32_SX_U64(u64Src, u32Imm); /* parameter count for the threaded function for this block. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2854
2855
/**
 * @opcode 0x6a
 *
 * push Ib - pushes a byte immediate sign-extended (i8Imm is int8_t) to the
 * effective operand size.
 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(i8Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(i8Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(i8Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2890
2891
/**
 * @opcode 0x6b
 *
 * imul Gv,Ev,Ib - three-operand signed multiply with a byte immediate that
 * is sign-extended to the effective operand size.  Structure mirrors
 * iemOp_imul_Gv_Ev_Iz; for memory operands the single immediate byte is
 * announced to the effective-address calculation.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            /* Helper variant is selected by the guest CPU's EFLAGS behavior. */
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend Ib */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 immediate byte follows. */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend Ib */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 immediate byte follows. */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend Ib */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 immediate byte follows. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the */
                IEM_MC_ASSIGN_U8_SX_U64(u64Src, u8Imm); /* parameter count for the threaded function for this block. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3057
3058
/**
 * @opcode 0x6c
 *
 * ins Yb,DX - byte string input from port DX, with or without a REP prefix.
 * All variants defer to C implementations selected by the effective address
 * mode.  NOTE(review): the 'false' argument presumably indicates the I/O
 * permission check has not been done yet - confirm against the iemCImpl_ins
 * signatures.
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3089
3090
/**
 * @opcode 0x6d
 *
 * INS Yv,DX - word/dword string input from port DX.  Dispatches on both the
 * effective operand size and the effective address size; a 64-bit operand
 * size is handled by the 32-bit workers (the IEMMODE_64BIT case label falls
 * into the IEMMODE_32BIT one below).
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();                      /* INS was introduced with the 80186. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* default: of the op-size switch. */
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* default: of the op-size switch. */
        }
    }
}
3153
3154
/**
 * @opcode 0x6e
 *
 * OUTS DX,Yb - byte string output to port DX.  The source segment is the
 * effective segment (iEffSeg, honours segment override prefixes) and is
 * passed to the C implementation.  REPNZ is treated the same as REPZ.
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();                      /* OUTS was introduced with the 80186. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            /* 2nd arg 'false' is presumably "I/O permission already checked"
               - TODO confirm against the iemCImpl_rep_outs_* workers. */
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3185
3186
/**
 * @opcode 0x6f
 *
 * OUTS DX,Yv - word/dword string output to port DX.  Dispatches on operand
 * and address size; 64-bit operand size falls into the 32-bit case.  The
 * effective (override-able) source segment is forwarded to the C worker.
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();                      /* OUTS was introduced with the 80186. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* default: of the op-size switch. */
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* default: of the op-size switch. */
        }
    }
}
3249
3250
/**
 * @opcode 0x70
 * JO Jb - jump short if overflow (OF=1).
 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    /* Signed 8-bit displacement follows the opcode. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Jcc defaults to 64-bit operand size in long mode; per the helper's name,
       Intel CPUs ignore the operand-size prefix here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);  /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();      /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3269
3270
/**
 * @opcode 0x71
 * JNO Jb - jump short if not overflow (OF=0).  Tests the set case and jumps
 * in the ELSE branch.
 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();      /* OF=1: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);  /* OF=0: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3289
/**
 * @opcode 0x72
 * JC/JB/JNAE Jb - jump short if carry (CF=1).
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);  /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();      /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3308
3309
/**
 * @opcode 0x73
 * JNC/JNB/JAE Jb - jump short if not carry (CF=0).
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();      /* CF=1: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);  /* CF=0: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3328
3329
/**
 * @opcode 0x74
 * JE/JZ Jb - jump short if equal/zero (ZF=1).
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);  /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();      /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3348
3349
/**
 * @opcode 0x75
 * JNE/JNZ Jb - jump short if not equal/not zero (ZF=0).
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();      /* ZF=1: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);  /* ZF=0: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3368
3369
/**
 * @opcode 0x76
 * JBE/JNA Jb - jump short if below or equal (CF=1 or ZF=1).
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);  /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();      /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3388
3389
/**
 * @opcode 0x77
 * JA/JNBE Jb - jump short if above (CF=0 and ZF=0).
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();      /* CF or ZF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);  /* both clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3408
3409
/**
 * @opcode 0x78
 * JS Jb - jump short if sign (SF=1).
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);  /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();      /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3428
3429
/**
 * @opcode 0x79
 * JNS Jb - jump short if not sign (SF=0).
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();      /* SF=1: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);  /* SF=0: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3448
3449
/**
 * @opcode 0x7a
 * JP/JPE Jb - jump short if parity even (PF=1).
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);  /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();      /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3468
3469
/**
 * @opcode 0x7b
 * JNP/JPO Jb - jump short if parity odd (PF=0).
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();      /* PF=1: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);  /* PF=0: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3488
3489
/**
 * @opcode 0x7c
 * JL/JNGE Jb - jump short if less (SF != OF).
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);  /* SF != OF: taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();      /* SF == OF: not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3508
3509
/**
 * @opcode 0x7d
 * JNL/JGE Jb - jump short if not less (SF == OF).
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();      /* SF != OF: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);  /* SF == OF: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3528
3529
/**
 * @opcode 0x7e
 * JLE/JNG Jb - jump short if less or equal (ZF=1 or SF != OF).
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);  /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();      /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3548
3549
/**
 * @opcode 0x7f
 * JG/JNLE Jb - jump short if greater (ZF=0 and SF == OF).
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();      /* ZF=1 or SF!=OF: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);  /* ZF=0 and SF==OF: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3568
3569
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Eb_Ib_80.
 *
 * Emits the register-target form and the non-LOCKed memory-target form.  The
 * macro deliberately ends inside an open 'else { ... (void)0' and therefore
 * MUST be followed by either IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK (invalid-lock
 * ops, e.g. cmp) or IEMOP_BODY_BINARY_Eb_Ib_LOCKED, which supply the LOCKed
 * body (or the \#UD) and close the braces.
 *
 * In the memory form the effective address is calculated before the
 * immediate is fetched; the '1' passed to IEM_MC_CALC_RM_EFF_ADDR is the
 * number of immediate bytes still to come (the extended cbImm parameter).
 *
 * @param a_fnNormalU8  Worker called as (pu8Dst, u8Src, pEFlags).
 * @param a_fRW         IEM_ACCESS_DATA_RW, or IEM_ACCESS_DATA_R for
 *                      read-only-destination ops (cmp).
 */
#define IEMOP_BODY_BINARY_Eb_Ib(a_fnNormalU8, a_fRW) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_BEGIN(3, 0); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 2); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP(pu8Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, a_fRW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
3619
/**
 * Terminator for IEMOP_BODY_BINARY_Eb_Ib when the LOCK prefix is invalid for
 * the instruction (cmp): raises \#UD and closes the braces the body macro
 * left open.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
        } \
    } \
    (void)0
3626
/**
 * Terminator for IEMOP_BODY_BINARY_Eb_Ib supplying the LOCK-prefixed
 * memory-target body (always mapped IEM_ACCESS_DATA_RW) and closing the
 * braces the body macro left open.
 *
 * @param a_fnLockedU8  Atomic worker called as (pu8Dst, u8Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 2); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
3649
3650
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /0
 * ADD Eb,Ib - LOCK is allowed on a memory destination.
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_add_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
}
3661
3662
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /1
 * OR Eb,Ib - LOCK is allowed on a memory destination.
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_or_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
}
3673
3674
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /2
 * ADC Eb,Ib - add with carry; LOCK is allowed on a memory destination.
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_adc_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
}
3685
3686
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /3
 * SBB Eb,Ib - subtract with borrow; LOCK is allowed on a memory destination.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_sbb_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
}
3697
3698
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /4
 * AND Eb,Ib - LOCK is allowed on a memory destination.
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_and_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
}
3709
3710
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /5
 * SUB Eb,Ib - LOCK is allowed on a memory destination.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_sub_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
}
3721
3722
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /6
 * XOR Eb,Ib - LOCK is allowed on a memory destination.
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_xor_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
}
3733
3734
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /7
 * CMP Eb,Ib - only reads the destination (IEM_ACCESS_DATA_R), so a LOCK
 * prefix is invalid and raises \#UD via the NO_LOCK terminator.
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(iemAImpl_cmp_u8, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
}
3745
3746
/**
 * @opcode 0x80
 *
 * Group 1 Eb,Ib: decodes the ModRM byte and dispatches on its reg field to
 * add/or/adc/sbb/and/sub/xor/cmp.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* reg field is 0..7, cannot get here */
    }
}
3766
3767
/**
 * Body for a group 1 binary operator (Ev,Iz - opcode 0x81).
 *
 * Emits register-target forms for all three operand sizes and the non-LOCKed
 * memory-target forms; ends inside an open 'else { (void)0' and must be
 * paired with IEMOP_BODY_BINARY_Ev_Iz_NO_LOCK or
 * IEMOP_BODY_BINARY_Ev_Iz_LOCKED.
 *
 * Notes:
 *  - The 64-bit immediate is a sign-extended 32-bit value
 *    (IEM_OPCODE_GET_NEXT_S32_SX_U64).
 *  - In memory forms the cbImm argument to IEM_MC_CALC_RM_EFF_ADDR (2 or 4)
 *    tells the address calculation how many immediate bytes follow.
 *  - The 32-bit register path clears the high dword of the destination only
 *    for read-write ops (a_fRW == IEM_ACCESS_DATA_RW); cmp passes
 *    IEM_ACCESS_DATA_R and must not touch the destination.
 *  - NOTE(review): the 64-bit register case calls
 *    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX() where the 16/32-bit cases use
 *    plain IEMOP_HLP_DONE_DECODING(); also the 64-bit memory case orders
 *    DONE_DECODING before IEM_MC_ASSIGN unlike its siblings.  Both look like
 *    harmless inconsistencies - confirm before "fixing".
 *
 * @param a_fnNormalU16/U32/U64  Size-specific workers (pDst, uSrc, pEFlags).
 * @param a_fRW                  IEM_ACCESS_DATA_RW or IEM_ACCESS_DATA_R.
 */
#define IEMOP_BODY_BINARY_Ev_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEMOP_HLP_DONE_DECODING(); \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEMOP_HLP_DONE_DECODING(); \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                if (a_fRW == IEM_ACCESS_DATA_RW) /* not cmp - it only reads the dst */ \
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                    IEM_MC_ARG(uint16_t,        u16Src,                 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                    IEM_MC_ARG(uint32_t,        u32Src,                 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                    IEM_MC_ARG(uint64_t,        u64Src,                 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
3918
/**
 * Terminator for IEMOP_BODY_BINARY_Ev_Iz when the LOCK prefix is invalid for
 * the instruction (cmp): raises \#UD and closes the braces left open.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
        } \
    } \
    (void)0
3925
/**
 * Terminator for IEMOP_BODY_BINARY_Ev_Iz supplying the LOCK-prefixed
 * memory-target bodies for all three operand sizes (always mapped
 * IEM_ACCESS_DATA_RW) and closing the braces the body macro left open.
 *
 * @param a_fnLockedU16/U32/U64  Atomic size-specific workers
 *                               (pDst, uSrc, pEFlags).
 */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                    IEM_MC_ARG(uint16_t,        u16Src,                 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                    IEM_MC_ARG(uint32_t,        u32Src,                 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                    IEM_MC_ARG(uint64_t,        u64Src,                 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4003
4004
/**
 * @opmaps grp1_81
 * @opcode /0
 * ADD Ev,Iz - LOCK is allowed on a memory destination.
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
4015
4016
/**
 * @opmaps grp1_81
 * @opcode /1
 * OR Ev,Iz - LOCK is allowed on a memory destination.
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
4027
4028
/**
 * @opmaps grp1_81
 * @opcode /2
 * ADC Ev,Iz - add with carry; LOCK is allowed on a memory destination.
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
4039
4040
/**
 * @opmaps grp1_81
 * @opcode /3
 * SBB Ev,Iz - subtract with borrow; LOCK is allowed on a memory destination.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
4051
4052
/**
 * @opmaps grp1_81
 * @opcode /4
 * AND Ev,Iz - LOCK is allowed on a memory destination.
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
4063
4064
/**
 * @opmaps grp1_81
 * @opcode /5
 * SUB Ev,Iz - LOCK is allowed on a memory destination.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
4075
4076
/**
 * @opmaps grp1_81
 * @opcode /6
 * XOR Ev,Iz - LOCK is allowed on a memory destination.
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
4087
4088
/**
 * @opmaps grp1_81
 * @opcode /7
 * CMP Ev,Iz - only reads the destination (IEM_ACCESS_DATA_R), so a LOCK
 * prefix is invalid and raises \#UD via the NO_LOCK terminator.
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_Ev_Iz_NO_LOCK();
}
4099
4100
4101/**
4102 * @opcode 0x81
 *
 * Group 1 dispatcher: the ModR/M reg field (bits 5:3) selects which of the
 * eight binary ALU operations to perform on Ev with a word/dword immediate.
4103 */
4104FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
4105{
4106    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4107    switch (IEM_GET_MODRM_REG_8(bRm))
4108    {
4109        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
4110        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz,  bRm);
4111        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
4112        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
4113        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
4114        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
4115        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
4116        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
4117        IEM_NOT_REACHED_DEFAULT_CASE_RET();
4118    }
4119}
4120
4121
4122/**
4123 * @opcode 0x82
4124 * @opmnemonic grp1_82
4125 * @opgroup og_groups
 *
 * Legacy alias of opcode 0x80 (group 1 Eb,Ib); the 0x82 encoding is invalid
 * in 64-bit mode, hence the IEMOP_HLP_NO_64BIT() guard before forwarding.
4126 */
4127FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
4128{
4129    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
4130    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
4131}
4132
4133
4134/**
4135 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4136 * iemOp_Grp1_Ev_Ib.
 *
 * The byte immediate is sign-extended to the effective operand size via the
 * (int8_t) casts below.  For the register form the immediate is fetched right
 * after DONE_DECODING; for the memory form the effective address is computed
 * first (with cbImm=1 so displacement decoding accounts for the trailing
 * immediate byte) and the immediate is fetched afterwards.  a_fRW selects the
 * mapping mode; CMP passes IEM_ACCESS_DATA_R and so skips the high-dword
 * clearing in the 32-bit register case.  The macro deliberately ends inside an
 * open else-branch: it must be followed by IEMOP_BODY_BINARY_Ev_Ib_LOCKED or
 * IEMOP_BODY_BINARY_Ev_Ib_NO_LOCK, which close the braces.
4137 */
4138#define IEMOP_BODY_BINARY_Ev_Ib(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
4139    if (IEM_IS_MODRM_REG_MODE(bRm)) \
4140    { \
4141        /* \
4142         * Register target \
4143         */ \
4144        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4145        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4146        switch (pVCpu->iem.s.enmEffOpSize) \
4147        { \
4148            case IEMMODE_16BIT: \
4149            { \
4150                IEM_MC_BEGIN(3, 0); \
4151                IEM_MC_ARG(uint16_t *,      pu16Dst,                 0); \
4152                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (int8_t)u8Imm,1); \
4153                IEM_MC_ARG(uint32_t *,      pEFlags,                 2); \
4154                \
4155                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4156                IEM_MC_REF_EFLAGS(pEFlags); \
4157                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4158                \
4159                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4160                IEM_MC_END(); \
4161                break; \
4162            } \
4163            \
4164            case IEMMODE_32BIT: \
4165            { \
4166                IEM_MC_BEGIN(3, 0); \
4167                IEM_MC_ARG(uint32_t *,      pu32Dst,                 0); \
4168                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (int8_t)u8Imm,1); \
4169                IEM_MC_ARG(uint32_t *,      pEFlags,                 2); \
4170                \
4171                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4172                IEM_MC_REF_EFLAGS(pEFlags); \
4173                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4174                if ((a_fRW) != IEM_ACCESS_DATA_R) \
4175                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
4176                \
4177                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4178                IEM_MC_END(); \
4179                break; \
4180            } \
4181            \
4182            case IEMMODE_64BIT: \
4183            { \
4184                IEM_MC_BEGIN(3, 0); \
4185                IEM_MC_ARG(uint64_t *,      pu64Dst,                 0); \
4186                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (int8_t)u8Imm,1); \
4187                IEM_MC_ARG(uint32_t *,      pEFlags,                 2); \
4188                \
4189                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4190                IEM_MC_REF_EFLAGS(pEFlags); \
4191                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4192                \
4193                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4194                IEM_MC_END(); \
4195                break; \
4196            } \
4197            \
4198            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4199        } \
4200    } \
4201    else \
4202    { \
4203        /* \
4204         * Memory target. \
4205         */ \
        /* Non-LOCK path: map, operate with the normal helper, commit. */ \
4206        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
4207        { \
4208            switch (pVCpu->iem.s.enmEffOpSize) \
4209            { \
4210                case IEMMODE_16BIT: \
4211                { \
4212                    IEM_MC_BEGIN(3, 2); \
4213                    IEM_MC_ARG(uint16_t *,      pu16Dst,                 0); \
4214                    IEM_MC_ARG(uint16_t,        u16Src,                  1); \
4215                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,         2); \
4216                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
4217                    \
4218                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4219                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4220                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
4221                    IEMOP_HLP_DONE_DECODING(); \
4222                    IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
4223                    IEM_MC_FETCH_EFLAGS(EFlags); \
4224                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4225                    \
4226                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
4227                    IEM_MC_COMMIT_EFLAGS(EFlags); \
4228                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4229                    IEM_MC_END(); \
4230                    break; \
4231                } \
4232                \
4233                case IEMMODE_32BIT: \
4234                { \
4235                    IEM_MC_BEGIN(3, 2); \
4236                    IEM_MC_ARG(uint32_t *,      pu32Dst,                 0); \
4237                    IEM_MC_ARG(uint32_t,        u32Src,                  1); \
4238                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,         2); \
4239                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
4240                    \
4241                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4242                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4243                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
4244                    IEMOP_HLP_DONE_DECODING(); \
4245                    IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
4246                    IEM_MC_FETCH_EFLAGS(EFlags); \
4247                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4248                    \
4249                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
4250                    IEM_MC_COMMIT_EFLAGS(EFlags); \
4251                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4252                    IEM_MC_END(); \
4253                    break; \
4254                } \
4255                \
4256                case IEMMODE_64BIT: \
4257                { \
4258                    IEM_MC_BEGIN(3, 2); \
4259                    IEM_MC_ARG(uint64_t *,      pu64Dst,                 0); \
4260                    IEM_MC_ARG(uint64_t,        u64Src,                  1); \
4261                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,         2); \
4262                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
4263                    \
4264                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4265                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4266                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
4267                    IEMOP_HLP_DONE_DECODING(); \
4268                    IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
4269                    IEM_MC_FETCH_EFLAGS(EFlags); \
4270                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4271                    \
4272                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
4273                    IEM_MC_COMMIT_EFLAGS(EFlags); \
4274                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4275                    IEM_MC_END(); \
4276                    break; \
4277                } \
4278                \
4279                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4280            } \
4281        } \
4282        else \
4283        { \
            /* LOCK prefix seen: body continued by _LOCKED or _NO_LOCK macro. */ \
4284            (void)0
4285
4285
/**
 * Tail for IEMOP_BODY_BINARY_Ev_Ib when the instruction does not permit a
 * LOCK prefix (e.g. CMP): raises \#UD and closes the braces left open above.
 */
4286#define IEMOP_BODY_BINARY_Ev_Ib_NO_LOCK() \
4287            IEMOP_HLP_DONE_DECODING(); \
4288            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
4289        } \
4290    } \
4291    (void)0
4292
/**
 * Tail for IEMOP_BODY_BINARY_Ev_Ib handling the LOCK-prefixed memory form:
 * same decode sequence as the non-locked memory path (effective address with
 * cbImm=1, sign-extended byte immediate), but the destination is always mapped
 * IEM_ACCESS_DATA_RW and the atomic a_fnLocked* helpers are invoked.  Closes
 * the braces left open by IEMOP_BODY_BINARY_Ev_Ib.
 */
4293#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
4294            switch (pVCpu->iem.s.enmEffOpSize) \
4295            { \
4296                case IEMMODE_16BIT: \
4297                { \
4298                    IEM_MC_BEGIN(3, 2); \
4299                    IEM_MC_ARG(uint16_t *,      pu16Dst,                 0); \
4300                    IEM_MC_ARG(uint16_t,        u16Src,                  1); \
4301                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,         2); \
4302                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
4303                    \
4304                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4305                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4306                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
4307                    IEMOP_HLP_DONE_DECODING(); \
4308                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
4309                    IEM_MC_FETCH_EFLAGS(EFlags); \
4310                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
4311                    \
4312                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
4313                    IEM_MC_COMMIT_EFLAGS(EFlags); \
4314                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4315                    IEM_MC_END(); \
4316                    break; \
4317                } \
4318                \
4319                case IEMMODE_32BIT: \
4320                { \
4321                    IEM_MC_BEGIN(3, 2); \
4322                    IEM_MC_ARG(uint32_t *,      pu32Dst,                 0); \
4323                    IEM_MC_ARG(uint32_t,        u32Src,                  1); \
4324                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,         2); \
4325                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
4326                    \
4327                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4328                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4329                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
4330                    IEMOP_HLP_DONE_DECODING(); \
4331                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
4332                    IEM_MC_FETCH_EFLAGS(EFlags); \
4333                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
4334                    \
4335                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
4336                    IEM_MC_COMMIT_EFLAGS(EFlags); \
4337                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4338                    IEM_MC_END(); \
4339                    break; \
4340                } \
4341                \
4342                case IEMMODE_64BIT: \
4343                { \
4344                    IEM_MC_BEGIN(3, 2); \
4345                    IEM_MC_ARG(uint64_t *,      pu64Dst,                 0); \
4346                    IEM_MC_ARG(uint64_t,        u64Src,                  1); \
4347                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,         2); \
4348                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
4349                    \
4350                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4351                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4352                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
4353                    IEMOP_HLP_DONE_DECODING(); \
4354                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
4355                    IEM_MC_FETCH_EFLAGS(EFlags); \
4356                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
4357                    \
4358                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
4359                    IEM_MC_COMMIT_EFLAGS(EFlags); \
4360                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4361                    IEM_MC_END(); \
4362                    break; \
4363                } \
4364                \
4365                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4366            } \
4367        } \
4368    } \
4369    (void)0
4370
4371/**
4372 * @opmaps grp1_83
4373 * @opcode /0
 * ADD Ev,Ib: Ev += sign-extended byte immediate; LOCK memory forms use _locked helpers.
4374 */
4375FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
4376{
4377    IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
4378    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64, IEM_ACCESS_DATA_RW);
4379    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
4380}
4381
4382
4383/**
4384 * @opmaps grp1_83
4385 * @opcode /1
 * OR Ev,Ib: Ev |= sign-extended byte immediate; LOCK memory forms use _locked helpers.
4386 */
4387FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
4388{
4389    IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
4390    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64, IEM_ACCESS_DATA_RW);
4391    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
4392}
4393
4394
4395/**
4396 * @opmaps grp1_83
4397 * @opcode /2
 * ADC Ev,Ib: Ev += sign-extended byte immediate + CF; LOCK memory forms use _locked helpers.
4398 */
4399FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
4400{
4401    IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
4402    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64, IEM_ACCESS_DATA_RW);
4403    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
4404}
4405
4406
4407/**
4408 * @opmaps grp1_83
4409 * @opcode /3
 * SBB Ev,Ib: Ev -= (sign-extended byte immediate + CF); LOCK memory forms use _locked helpers.
4410 */
4411FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
4412{
4413    IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
4414    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64, IEM_ACCESS_DATA_RW);
4415    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
4416}
4417
4418
4419/**
4420 * @opmaps grp1_83
4421 * @opcode /4
 * AND Ev,Ib: Ev &= sign-extended byte immediate; LOCK memory forms use _locked helpers.
4422 */
4423FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
4424{
4425    IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
4426    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64, IEM_ACCESS_DATA_RW);
4427    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
4428}
4429
4430
4431/**
4432 * @opmaps grp1_83
4433 * @opcode /5
 * SUB Ev,Ib: Ev -= sign-extended byte immediate; LOCK memory forms use _locked helpers.
4434 */
4435FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
4436{
4437    IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
4438    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64, IEM_ACCESS_DATA_RW);
4439    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
4440}
4441
4442
4443/**
4444 * @opmaps grp1_83
4445 * @opcode /6
 * XOR Ev,Ib: Ev ^= sign-extended byte immediate; LOCK memory forms use _locked helpers.
4446 */
4447FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
4448{
4449    IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
4450    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64, IEM_ACCESS_DATA_RW);
4451    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
4452}
4453
4454
4455/**
4456 * @opmaps grp1_83
4457 * @opcode /7
 * CMP Ev,Ib: flags-only compare with sign-extended byte immediate; destination
 * mapped read-only and LOCK prefix rejected via the _NO_LOCK tail.
4458 */
4459FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
4460{
4461    IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
4462    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_cmp_u16,        iemAImpl_cmp_u32,        iemAImpl_cmp_u64, IEM_ACCESS_DATA_R);
4463    IEMOP_BODY_BINARY_Ev_Ib_NO_LOCK();
4464}
4465
4466
4467/**
4468 * @opcode 0x83
 *
 * Group 1 dispatcher for the sign-extended byte-immediate forms: the ModR/M
 * reg field selects the ALU operation.
4469 */
4470FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
4471{
4472    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
4473             to the 386 even if absent in the intel reference manuals and some
4474             3rd party opcode listings. */
4475    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4476    switch (IEM_GET_MODRM_REG_8(bRm))
4477    {
4478        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
4479        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib,  bRm);
4480        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
4481        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
4482        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
4483        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
4484        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
4485        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
4486        IEM_NOT_REACHED_DEFAULT_CASE_RET();
4487    }
4488}
4489
4490
4491/**
4492 * @opcode 0x84
 *
 * TEST Eb,Gb: AND without writing the destination (IEM_ACCESS_DATA_R), flags
 * only; AF is undefined per the verification annotation.  LOCK is invalid.
4493 */
4494FNIEMOP_DEF(iemOp_test_Eb_Gb)
4495{
4496    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
4497    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
4498    IEMOP_BODY_BINARY_rm_r8(iemAImpl_test_u8, IEM_ACCESS_DATA_R);
4499    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
4500}
4501
4502
4503/**
4504 * @opcode 0x85
 *
 * TEST Ev,Gv: AND without writing the destination (IEM_ACCESS_DATA_R), flags
 * only; AF is undefined per the verification annotation.  LOCK is invalid.
4505 */
4506FNIEMOP_DEF(iemOp_test_Ev_Gv)
4507{
4508    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
4509    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
4510    IEMOP_BODY_BINARY_rm_rv(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, IEM_ACCESS_DATA_R);
4511    IEMOP_BODY_BINARY_rm_rv_NO_LOCK();
4512}
4513
4514
4515/**
4516 * @opcode 0x86
 *
 * XCHG Eb,Gb.  The register form is a plain three-step swap via two locals.
 * The memory form maps the memory operand RW and calls the locked helper by
 * default (xchg has implicit lock semantics on memory operands) unless the
 * fDisregardLock tweak is active, in which case the unlocked helper is used.
4517 */
4518FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
4519{
4520    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4521    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
4522
4523    /*
4524     * If rm is denoting a register, no more instruction bytes.
4525     */
4526    if (IEM_IS_MODRM_REG_MODE(bRm))
4527    {
4528        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4529
4530        IEM_MC_BEGIN(0, 2);
4531        IEM_MC_LOCAL(uint8_t, uTmp1);
4532        IEM_MC_LOCAL(uint8_t, uTmp2);
4533
4534        IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
4535        IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
4536        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
4537        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
4538
4539        IEM_MC_ADVANCE_RIP_AND_FINISH();
4540        IEM_MC_END();
4541    }
4542    else
4543    {
4544        /*
4545         * We're accessing memory.
4546         */
4547/** @todo the register must be committed separately! */
4548        IEM_MC_BEGIN(2, 2);
4549        IEM_MC_ARG(uint8_t *,  pu8Mem,           0);
4550        IEM_MC_ARG(uint8_t *,  pu8Reg,           1);
4551        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4552
4553        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4554        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
4555        IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
4556        if (!pVCpu->iem.s.fDisregardLock)
4557            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
4558        else
4559            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
4560        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);
4561
4562        IEM_MC_ADVANCE_RIP_AND_FINISH();
4563        IEM_MC_END();
4564    }
4565}
4566
4567
4568/**
4569 * @opcode 0x87
 *
 * XCHG Ev,Gv for 16/32/64-bit operand sizes.  Register form swaps via two
 * locals; memory form maps the operand RW and uses the locked helper unless
 * fDisregardLock is set.  Note the 32-bit memory form explicitly clears the
 * high dword of the register operand after the swap (high-dword zeroing on
 * 32-bit GPR writes in long mode).
4570 */
4571FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
4572{
4573    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
4574    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4575
4576    /*
4577     * If rm is denoting a register, no more instruction bytes.
4578     */
4579    if (IEM_IS_MODRM_REG_MODE(bRm))
4580    {
4581        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4582
4583        switch (pVCpu->iem.s.enmEffOpSize)
4584        {
4585            case IEMMODE_16BIT:
4586                IEM_MC_BEGIN(0, 2);
4587                IEM_MC_LOCAL(uint16_t, uTmp1);
4588                IEM_MC_LOCAL(uint16_t, uTmp2);
4589
4590                IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
4591                IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
4592                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
4593                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
4594
4595                IEM_MC_ADVANCE_RIP_AND_FINISH();
4596                IEM_MC_END();
4597                break;
4598
4599            case IEMMODE_32BIT:
4600                IEM_MC_BEGIN(0, 2);
4601                IEM_MC_LOCAL(uint32_t, uTmp1);
4602                IEM_MC_LOCAL(uint32_t, uTmp2);
4603
4604                IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
4605                IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
4606                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
4607                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
4608
4609                IEM_MC_ADVANCE_RIP_AND_FINISH();
4610                IEM_MC_END();
4611                break;
4612
4613            case IEMMODE_64BIT:
4614                IEM_MC_BEGIN(0, 2);
4615                IEM_MC_LOCAL(uint64_t, uTmp1);
4616                IEM_MC_LOCAL(uint64_t, uTmp2);
4617
4618                IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
4619                IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
4620                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
4621                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
4622
4623                IEM_MC_ADVANCE_RIP_AND_FINISH();
4624                IEM_MC_END();
4625                break;
4626
4627            IEM_NOT_REACHED_DEFAULT_CASE_RET();
4628        }
4629    }
4630    else
4631    {
4632        /*
4633         * We're accessing memory.
4634         */
4635        switch (pVCpu->iem.s.enmEffOpSize)
4636        {
4637/** @todo the register must be committed separately! */
4638            case IEMMODE_16BIT:
4639                IEM_MC_BEGIN(2, 2);
4640                IEM_MC_ARG(uint16_t *,  pu16Mem, 0);
4641                IEM_MC_ARG(uint16_t *,  pu16Reg, 1);
4642                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4643
4644                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4645                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
4646                IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
4647                if (!pVCpu->iem.s.fDisregardLock)
4648                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
4649                else
4650                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
4651                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
4652
4653                IEM_MC_ADVANCE_RIP_AND_FINISH();
4654                IEM_MC_END();
4655                break;
4656
4657            case IEMMODE_32BIT:
4658                IEM_MC_BEGIN(2, 2);
4659                IEM_MC_ARG(uint32_t *,  pu32Mem, 0);
4660                IEM_MC_ARG(uint32_t *,  pu32Reg, 1);
4661                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4662
4663                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4664                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
4665                IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
4666                if (!pVCpu->iem.s.fDisregardLock)
4667                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
4668                else
4669                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
4670                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
4671
4672                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
4673                IEM_MC_ADVANCE_RIP_AND_FINISH();
4674                IEM_MC_END();
4675                break;
4676
4677            case IEMMODE_64BIT:
4678                IEM_MC_BEGIN(2, 2);
4679                IEM_MC_ARG(uint64_t *,  pu64Mem, 0);
4680                IEM_MC_ARG(uint64_t *,  pu64Reg, 1);
4681                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4682
4683                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4684                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
4685                IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
4686                if (!pVCpu->iem.s.fDisregardLock)
4687                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
4688                else
4689                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
4690                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
4691
4692                IEM_MC_ADVANCE_RIP_AND_FINISH();
4693                IEM_MC_END();
4694                break;
4695
4696            IEM_NOT_REACHED_DEFAULT_CASE_RET();
4697        }
4698    }
4699}
4700
4701
4702/**
4703 * @opcode 0x88
 *
 * MOV Eb,Gb: store the byte register selected by the reg field into the r/m
 * destination (register or memory).  LOCK is rejected in both forms.
4704 */
4705FNIEMOP_DEF(iemOp_mov_Eb_Gb)
4706{
4707    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
4708
4709    uint8_t bRm;
4710    IEM_OPCODE_GET_NEXT_U8(&bRm);
4711
4712    /*
4713     * If rm is denoting a register, no more instruction bytes.
4714     */
4715    if (IEM_IS_MODRM_REG_MODE(bRm))
4716    {
4717        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4718        IEM_MC_BEGIN(0, 1);
4719        IEM_MC_LOCAL(uint8_t, u8Value);
4720        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4721        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
4722        IEM_MC_ADVANCE_RIP_AND_FINISH();
4723        IEM_MC_END();
4724    }
4725    else
4726    {
4727        /*
4728         * We're writing a register to memory.
4729         */
4730        IEM_MC_BEGIN(0, 2);
4731        IEM_MC_LOCAL(uint8_t, u8Value);
4732        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4733        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4734        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4735        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4736        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
4737        IEM_MC_ADVANCE_RIP_AND_FINISH();
4738        IEM_MC_END();
4739    }
4740}
4741
4742
4743/**
4744 * @opcode 0x89
 *
 * MOV Ev,Gv: store the 16/32/64-bit register selected by the reg field into
 * the r/m destination (register or memory), per effective operand size.
 * LOCK is rejected in both forms.
4745 */
4746FNIEMOP_DEF(iemOp_mov_Ev_Gv)
4747{
4748    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
4749
4750    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4751
4752    /*
4753     * If rm is denoting a register, no more instruction bytes.
4754     */
4755    if (IEM_IS_MODRM_REG_MODE(bRm))
4756    {
4757        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4758        switch (pVCpu->iem.s.enmEffOpSize)
4759        {
4760            case IEMMODE_16BIT:
4761                IEM_MC_BEGIN(0, 1);
4762                IEM_MC_LOCAL(uint16_t, u16Value);
4763                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4764                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
4765                IEM_MC_ADVANCE_RIP_AND_FINISH();
4766                IEM_MC_END();
4767                break;
4768
4769            case IEMMODE_32BIT:
4770                IEM_MC_BEGIN(0, 1);
4771                IEM_MC_LOCAL(uint32_t, u32Value);
4772                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4773                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
4774                IEM_MC_ADVANCE_RIP_AND_FINISH();
4775                IEM_MC_END();
4776                break;
4777
4778            case IEMMODE_64BIT:
4779                IEM_MC_BEGIN(0, 1);
4780                IEM_MC_LOCAL(uint64_t, u64Value);
4781                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4782                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
4783                IEM_MC_ADVANCE_RIP_AND_FINISH();
4784                IEM_MC_END();
4785                break;
4786
4787            IEM_NOT_REACHED_DEFAULT_CASE_RET();
4788        }
4789    }
4790    else
4791    {
4792        /*
4793         * We're writing a register to memory.
4794         */
4795        switch (pVCpu->iem.s.enmEffOpSize)
4796        {
4797            case IEMMODE_16BIT:
4798                IEM_MC_BEGIN(0, 2);
4799                IEM_MC_LOCAL(uint16_t, u16Value);
4800                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4801                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4802                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4803                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4804                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
4805                IEM_MC_ADVANCE_RIP_AND_FINISH();
4806                IEM_MC_END();
4807                break;
4808
4809            case IEMMODE_32BIT:
4810                IEM_MC_BEGIN(0, 2);
4811                IEM_MC_LOCAL(uint32_t, u32Value);
4812                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4813                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4814                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4815                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4816                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
4817                IEM_MC_ADVANCE_RIP_AND_FINISH();
4818                IEM_MC_END();
4819                break;
4820
4821            case IEMMODE_64BIT:
4822                IEM_MC_BEGIN(0, 2);
4823                IEM_MC_LOCAL(uint64_t, u64Value);
4824                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4825                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4826                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4827                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4828                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
4829                IEM_MC_ADVANCE_RIP_AND_FINISH();
4830                IEM_MC_END();
4831                break;
4832
4833            IEM_NOT_REACHED_DEFAULT_CASE_RET();
4834        }
4835    }
4836}
4837
4838
4839/**
4840 * @opcode 0x8a
 *
 * MOV Gb,Eb: load the byte register selected by the reg field from the r/m
 * source (register or memory).  LOCK is rejected in both forms.
4841 */
4842FNIEMOP_DEF(iemOp_mov_Gb_Eb)
4843{
4844    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
4845
4846    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4847
4848    /*
4849     * If rm is denoting a register, no more instruction bytes.
4850     */
4851    if (IEM_IS_MODRM_REG_MODE(bRm))
4852    {
4853        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4854        IEM_MC_BEGIN(0, 1);
4855        IEM_MC_LOCAL(uint8_t, u8Value);
4856        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
4857        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
4858        IEM_MC_ADVANCE_RIP_AND_FINISH();
4859        IEM_MC_END();
4860    }
4861    else
4862    {
4863        /*
4864         * We're loading a register from memory.
4865         */
4866        IEM_MC_BEGIN(0, 2);
4867        IEM_MC_LOCAL(uint8_t, u8Value);
4868        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4869        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4870        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4871        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4872        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
4873        IEM_MC_ADVANCE_RIP_AND_FINISH();
4874        IEM_MC_END();
4875    }
4876}
4877
4878
4879/**
4880 * @opcode 0x8b
 *
 * MOV Gv,Ev: load the 16/32/64-bit register selected by the reg field from
 * the r/m source (register or memory), per effective operand size.  LOCK is
 * rejected in both forms.
4881 */
4882FNIEMOP_DEF(iemOp_mov_Gv_Ev)
4883{
4884    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
4885
4886    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4887
4888    /*
4889     * If rm is denoting a register, no more instruction bytes.
4890     */
4891    if (IEM_IS_MODRM_REG_MODE(bRm))
4892    {
4893        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4894        switch (pVCpu->iem.s.enmEffOpSize)
4895        {
4896            case IEMMODE_16BIT:
4897                IEM_MC_BEGIN(0, 1);
4898                IEM_MC_LOCAL(uint16_t, u16Value);
4899                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
4900                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
4901                IEM_MC_ADVANCE_RIP_AND_FINISH();
4902                IEM_MC_END();
4903                break;
4904
4905            case IEMMODE_32BIT:
4906                IEM_MC_BEGIN(0, 1);
4907                IEM_MC_LOCAL(uint32_t, u32Value);
4908                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
4909                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
4910                IEM_MC_ADVANCE_RIP_AND_FINISH();
4911                IEM_MC_END();
4912                break;
4913
4914            case IEMMODE_64BIT:
4915                IEM_MC_BEGIN(0, 1);
4916                IEM_MC_LOCAL(uint64_t, u64Value);
4917                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
4918                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
4919                IEM_MC_ADVANCE_RIP_AND_FINISH();
4920                IEM_MC_END();
4921                break;
4922
4923            IEM_NOT_REACHED_DEFAULT_CASE_RET();
4924        }
4925    }
4926    else
4927    {
4928        /*
4929         * We're loading a register from memory.
4930         */
4931        switch (pVCpu->iem.s.enmEffOpSize)
4932        {
4933            case IEMMODE_16BIT:
4934                IEM_MC_BEGIN(0, 2);
4935                IEM_MC_LOCAL(uint16_t, u16Value);
4936                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4937                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4938                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4939                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4940                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
4941                IEM_MC_ADVANCE_RIP_AND_FINISH();
4942                IEM_MC_END();
4943                break;
4944
4945            case IEMMODE_32BIT:
4946                IEM_MC_BEGIN(0, 2);
4947                IEM_MC_LOCAL(uint32_t, u32Value);
4948                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4949                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4950                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4951                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4952                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
4953                IEM_MC_ADVANCE_RIP_AND_FINISH();
4954                IEM_MC_END();
4955                break;
4956
4957            case IEMMODE_64BIT:
4958                IEM_MC_BEGIN(0, 2);
4959                IEM_MC_LOCAL(uint64_t, u64Value);
4960                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4961                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4962                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4963                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4964                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
4965                IEM_MC_ADVANCE_RIP_AND_FINISH();
4966                IEM_MC_END();
4967                break;
4968
4969            IEM_NOT_REACHED_DEFAULT_CASE_RET();
4970        }
4971    }
4972}
4973
4974
4975/**
4976 * opcode 0x63
 *
 * Mode-dependent dispatch: outside 64-bit mode 0x63 is ARPL Ew,Gw; in 64-bit
 * mode it is MOVSXD.  With a non-64-bit effective operand size in long mode
 * it is routed to the plain MOV Gv,Ev handler.
4977 * @todo Table fixme
4978 */
4979FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
4980{
4981    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
4982        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
4983    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
4984        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
4985    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
4986}
4987
4988
4989/**
4990 * @opcode 0x8c
 *
 * MOV Ev,Sw: store a segment selector into a general register or memory.
 * The register form honours the operand size (zero-extending for 32/64-bit);
 * the memory store is always word sized.
4991 */
4992FNIEMOP_DEF(iemOp_mov_Ev_Sw)
4993{
4994    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
4995
4996    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4997
4998    /*
4999     * Check that the source segment register exists (the reg field selects the
5000     * segment register to store here).  The REX.R prefix is ignored.
5001     */
5002    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5003    if (   iSegReg > X86_SREG_GS)
5004        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5005
5006    /*
5007     * If rm is denoting a register, no more instruction bytes.
5008     * In that case, the operand size is respected and the upper bits are
5009     * cleared (starting with some pentium).
5010     */
5011    if (IEM_IS_MODRM_REG_MODE(bRm))
5012    {
5013        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5014        switch (pVCpu->iem.s.enmEffOpSize)
5015        {
5016            case IEMMODE_16BIT:
5017                IEM_MC_BEGIN(0, 1);
5018                IEM_MC_LOCAL(uint16_t, u16Value);
5019                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
5020                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
5021                IEM_MC_ADVANCE_RIP_AND_FINISH();
5022                IEM_MC_END();
5023                break;
5024
5025            case IEMMODE_32BIT:
5026                IEM_MC_BEGIN(0, 1);
5027                IEM_MC_LOCAL(uint32_t, u32Value);
5028                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
5029                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
5030                IEM_MC_ADVANCE_RIP_AND_FINISH();
5031                IEM_MC_END();
5032                break;
5033
5034            case IEMMODE_64BIT:
5035                IEM_MC_BEGIN(0, 1);
5036                IEM_MC_LOCAL(uint64_t, u64Value);
5037                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
5038                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
5039                IEM_MC_ADVANCE_RIP_AND_FINISH();
5040                IEM_MC_END();
5041                break;
5042
5043            IEM_NOT_REACHED_DEFAULT_CASE_RET();
5044        }
5045    }
5046    else
5047    {
5048        /*
5049         * We're saving the register to memory.  The access is word sized
5050         * regardless of operand size prefixes.
5051         */
5052#if 0 /* not necessary */
5053        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5054#endif
5055        IEM_MC_BEGIN(0, 2);
5056        IEM_MC_LOCAL(uint16_t,  u16Value);
5057        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5058        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5059        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5060        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
5061        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
5062        IEM_MC_ADVANCE_RIP_AND_FINISH();
5063        IEM_MC_END();
5064    }
5065}
5065
5066
5067
5068
5069/**
5070 * @opcode 0x8d
 *
 * LEA Gv,M: store the computed effective address (no memory access) into the
 * destination register, truncated to the effective operand size for the
 * 16/32-bit cases.  The register (mod=3) form is \#UD.
5071 */
5072FNIEMOP_DEF(iemOp_lea_Gv_M)
5073{
5074    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
5075    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5076    if (IEM_IS_MODRM_REG_MODE(bRm))
5077        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */
5078
5079    switch (pVCpu->iem.s.enmEffOpSize)
5080    {
5081        case IEMMODE_16BIT:
5082            IEM_MC_BEGIN(0, 2);
5083            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5084            IEM_MC_LOCAL(uint16_t, u16Cast);
5085            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5086            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5087            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
5088            IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
5089            IEM_MC_ADVANCE_RIP_AND_FINISH();
5090            IEM_MC_END();
5091            break;
5092
5093        case IEMMODE_32BIT:
5094            IEM_MC_BEGIN(0, 2);
5095            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5096            IEM_MC_LOCAL(uint32_t, u32Cast);
5097            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5098            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5099            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
5100            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
5101            IEM_MC_ADVANCE_RIP_AND_FINISH();
5102            IEM_MC_END();
5103            break;
5104
5105        case IEMMODE_64BIT:
5106            IEM_MC_BEGIN(0, 1);
5107            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5108            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5109            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5110            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
5111            IEM_MC_ADVANCE_RIP_AND_FINISH();
5112            IEM_MC_END();
5113            break;
5114
5115        IEM_NOT_REACHED_DEFAULT_CASE_RET();
5116    }
5117}
5118
5119
5120/**
5121 * @opcode 0x8e
 *
 * MOV Sw,Ev: load a segment register from a word-sized general register or
 * memory operand.  CS as destination is invalid (\#UD); the actual load goes
 * through iemCImpl_load_SReg, which performs the selector checks.
5122 */
5123FNIEMOP_DEF(iemOp_mov_Sw_Ev)
5124{
5125    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
5126
5127    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5128
5129    /*
5130     * The practical operand size is 16-bit.
5131     */
5132#if 0 /* not necessary */
5133    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5134#endif
5135
5136    /*
5137     * Check that the destination register exists and can be used with this
5138     * instruction.  The REX.R prefix is ignored.
5139     */
5140    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5141    if (   iSegReg == X86_SREG_CS
5142        || iSegReg > X86_SREG_GS)
5143        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5144
5145    /*
5146     * If rm is denoting a register, no more instruction bytes.
5147     */
5148    if (IEM_IS_MODRM_REG_MODE(bRm))
5149    {
5150        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5151        IEM_MC_BEGIN(2, 0);
5152        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
5153        IEM_MC_ARG(uint16_t,      u16Value,          1);
5154        IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5155        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
5156        IEM_MC_END();
5157    }
5158    else
5159    {
5160        /*
5161         * We're loading the register from memory.  The access is word sized
5162         * regardless of operand size prefixes.
5163         */
5164        IEM_MC_BEGIN(2, 1);
5165        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
5166        IEM_MC_ARG(uint16_t,      u16Value,          1);
5167        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5168        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5169        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5170        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5171        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
5172        IEM_MC_END();
5173    }
5174}
5175
5176
/** Opcode 0x8f /0.
 *
 * POP Ev - pops a value off the stack into a general register or memory.
 * Interpreter-only implementation (not IEM_MC based), see notes below. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignoring it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calculations.  This means some serious extra annoyance here
     * since we decode and calculate the effective address in one step and
     * like to delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP.  The pop size is passed in
       bits 8 and up of the cbImm parameter, telling iemOpHlpCalcRmEffAddr to
       pretend rSP has already been incremented by the operand size. */
/** @todo testcase */
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl.  The pop is done on a
       temporary RSP copy so nothing is committed unless the store succeeds. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Commit the new RSP only now that both the pop and the store worked. */
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
5270
5271
5272/**
5273 * @opcode 0x8f
5274 */
5275FNIEMOP_DEF(iemOp_Grp1A__xop)
5276{
5277 /*
5278 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
5279 * three byte VEX prefix, except that the mmmmm field cannot have the values
5280 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
5281 */
5282 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5283 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
5284 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
5285
5286 IEMOP_MNEMONIC(xop, "xop");
5287 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
5288 {
5289 /** @todo Test when exctly the XOP conformance checks kick in during
5290 * instruction decoding and fetching (using \#PF). */
5291 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
5292 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
5293 if ( ( pVCpu->iem.s.fPrefixes
5294 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
5295 == 0)
5296 {
5297 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
5298 if ((bXop2 & 0x80 /* XOP.W */) && pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
5299 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
5300 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
5301 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
5302 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
5303 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
5304 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
5305 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
5306
5307 /** @todo XOP: Just use new tables and decoders. */
5308 switch (bRm & 0x1f)
5309 {
5310 case 8: /* xop opcode map 8. */
5311 IEMOP_BITCH_ABOUT_STUB();
5312 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5313
5314 case 9: /* xop opcode map 9. */
5315 IEMOP_BITCH_ABOUT_STUB();
5316 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5317
5318 case 10: /* xop opcode map 10. */
5319 IEMOP_BITCH_ABOUT_STUB();
5320 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5321
5322 default:
5323 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
5324 return IEMOP_RAISE_INVALID_OPCODE();
5325 }
5326 }
5327 else
5328 Log(("XOP: Invalid prefix mix!\n"));
5329 }
5330 else
5331 Log(("XOP: XOP support disabled!\n"));
5332 return IEMOP_RAISE_INVALID_OPCODE();
5333}
5334
5335
5336/**
5337 * Common 'xchg reg,rAX' helper.
5338 */
5339FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
5340{
5341 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5342
5343 iReg |= pVCpu->iem.s.uRexB;
5344 switch (pVCpu->iem.s.enmEffOpSize)
5345 {
5346 case IEMMODE_16BIT:
5347 IEM_MC_BEGIN(0, 2);
5348 IEM_MC_LOCAL(uint16_t, u16Tmp1);
5349 IEM_MC_LOCAL(uint16_t, u16Tmp2);
5350 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
5351 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
5352 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
5353 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
5354 IEM_MC_ADVANCE_RIP_AND_FINISH();
5355 IEM_MC_END();
5356 break;
5357
5358 case IEMMODE_32BIT:
5359 IEM_MC_BEGIN(0, 2);
5360 IEM_MC_LOCAL(uint32_t, u32Tmp1);
5361 IEM_MC_LOCAL(uint32_t, u32Tmp2);
5362 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
5363 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
5364 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
5365 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
5366 IEM_MC_ADVANCE_RIP_AND_FINISH();
5367 IEM_MC_END();
5368 break;
5369
5370 case IEMMODE_64BIT:
5371 IEM_MC_BEGIN(0, 2);
5372 IEM_MC_LOCAL(uint64_t, u64Tmp1);
5373 IEM_MC_LOCAL(uint64_t, u64Tmp2);
5374 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
5375 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
5376 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
5377 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
5378 IEM_MC_ADVANCE_RIP_AND_FINISH();
5379 IEM_MC_END();
5380 break;
5381
5382 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5383 }
5384}
5385
5386
5387/**
5388 * @opcode 0x90
5389 */
5390FNIEMOP_DEF(iemOp_nop)
5391{
5392 /* R8/R8D and RAX/EAX can be exchanged. */
5393 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
5394 {
5395 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
5396 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
5397 }
5398
5399 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
5400 {
5401 IEMOP_MNEMONIC(pause, "pause");
5402#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
5403 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVmx)
5404 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmx_pause);
5405#endif
5406#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
5407 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSvm)
5408 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_svm_pause);
5409#endif
5410 }
5411 else
5412 IEMOP_MNEMONIC(nop, "nop");
5413 IEM_MC_BEGIN(0, 0);
5414 IEM_MC_ADVANCE_RIP_AND_FINISH();
5415 IEM_MC_END();
5416}
5417
5418
5419/**
5420 * @opcode 0x91
5421 */
5422FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
5423{
5424 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
5425 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
5426}
5427
5428
5429/**
5430 * @opcode 0x92
5431 */
5432FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
5433{
5434 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
5435 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
5436}
5437
5438
5439/**
5440 * @opcode 0x93
5441 */
5442FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
5443{
5444 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
5445 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
5446}
5447
5448
5449/**
5450 * @opcode 0x94
5451 */
5452FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
5453{
5454 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
5455 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
5456}
5457
5458
5459/**
5460 * @opcode 0x95
5461 */
5462FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
5463{
5464 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
5465 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
5466}
5467
5468
5469/**
5470 * @opcode 0x96
5471 */
5472FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
5473{
5474 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
5475 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
5476}
5477
5478
5479/**
5480 * @opcode 0x97
5481 */
5482FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
5483{
5484 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
5485 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
5486}
5487
5488
5489/**
5490 * @opcode 0x98
5491 */
5492FNIEMOP_DEF(iemOp_cbw)
5493{
5494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5495 switch (pVCpu->iem.s.enmEffOpSize)
5496 {
5497 case IEMMODE_16BIT:
5498 IEMOP_MNEMONIC(cbw, "cbw");
5499 IEM_MC_BEGIN(0, 1);
5500 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
5501 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
5502 } IEM_MC_ELSE() {
5503 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
5504 } IEM_MC_ENDIF();
5505 IEM_MC_ADVANCE_RIP_AND_FINISH();
5506 IEM_MC_END();
5507 break;
5508
5509 case IEMMODE_32BIT:
5510 IEMOP_MNEMONIC(cwde, "cwde");
5511 IEM_MC_BEGIN(0, 1);
5512 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
5513 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
5514 } IEM_MC_ELSE() {
5515 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
5516 } IEM_MC_ENDIF();
5517 IEM_MC_ADVANCE_RIP_AND_FINISH();
5518 IEM_MC_END();
5519 break;
5520
5521 case IEMMODE_64BIT:
5522 IEMOP_MNEMONIC(cdqe, "cdqe");
5523 IEM_MC_BEGIN(0, 1);
5524 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
5525 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
5526 } IEM_MC_ELSE() {
5527 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
5528 } IEM_MC_ENDIF();
5529 IEM_MC_ADVANCE_RIP_AND_FINISH();
5530 IEM_MC_END();
5531 break;
5532
5533 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5534 }
5535}
5536
5537
5538/**
5539 * @opcode 0x99
5540 */
5541FNIEMOP_DEF(iemOp_cwd)
5542{
5543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5544 switch (pVCpu->iem.s.enmEffOpSize)
5545 {
5546 case IEMMODE_16BIT:
5547 IEMOP_MNEMONIC(cwd, "cwd");
5548 IEM_MC_BEGIN(0, 1);
5549 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
5550 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
5551 } IEM_MC_ELSE() {
5552 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
5553 } IEM_MC_ENDIF();
5554 IEM_MC_ADVANCE_RIP_AND_FINISH();
5555 IEM_MC_END();
5556 break;
5557
5558 case IEMMODE_32BIT:
5559 IEMOP_MNEMONIC(cdq, "cdq");
5560 IEM_MC_BEGIN(0, 1);
5561 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
5562 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
5563 } IEM_MC_ELSE() {
5564 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
5565 } IEM_MC_ENDIF();
5566 IEM_MC_ADVANCE_RIP_AND_FINISH();
5567 IEM_MC_END();
5568 break;
5569
5570 case IEMMODE_64BIT:
5571 IEMOP_MNEMONIC(cqo, "cqo");
5572 IEM_MC_BEGIN(0, 1);
5573 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
5574 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
5575 } IEM_MC_ELSE() {
5576 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
5577 } IEM_MC_ENDIF();
5578 IEM_MC_ADVANCE_RIP_AND_FINISH();
5579 IEM_MC_END();
5580 break;
5581
5582 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5583 }
5584}
5585
5586
5587/**
5588 * @opcode 0x9a
5589 */
5590FNIEMOP_DEF(iemOp_call_Ap)
5591{
5592 IEMOP_MNEMONIC(call_Ap, "call Ap");
5593 IEMOP_HLP_NO_64BIT();
5594
5595 /* Decode the far pointer address and pass it on to the far call C implementation. */
5596 uint32_t offSeg;
5597 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
5598 IEM_OPCODE_GET_NEXT_U32(&offSeg);
5599 else
5600 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
5601 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
5602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5603 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
5604}
5605
5606
/** Opcode 0x9b. (aka fwait)
 *
 * WAIT/FWAIT - checks for pending FPU exceptions (and device-not-available
 * conditions) without executing any FPU operation. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
5619
5620
5621/**
5622 * @opcode 0x9c
5623 */
5624FNIEMOP_DEF(iemOp_pushf_Fv)
5625{
5626 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
5627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5628 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5629 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
5630}
5631
5632
5633/**
5634 * @opcode 0x9d
5635 */
5636FNIEMOP_DEF(iemOp_popf_Fv)
5637{
5638 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
5639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5640 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5641 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
5642}
5643
5644
5645/**
5646 * @opcode 0x9e
5647 */
5648FNIEMOP_DEF(iemOp_sahf)
5649{
5650 IEMOP_MNEMONIC(sahf, "sahf");
5651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5652 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
5653 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
5654 return IEMOP_RAISE_INVALID_OPCODE();
5655 IEM_MC_BEGIN(0, 2);
5656 IEM_MC_LOCAL(uint32_t, u32Flags);
5657 IEM_MC_LOCAL(uint32_t, EFlags);
5658 IEM_MC_FETCH_EFLAGS(EFlags);
5659 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
5660 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
5661 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
5662 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
5663 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
5664 IEM_MC_COMMIT_EFLAGS(EFlags);
5665 IEM_MC_ADVANCE_RIP_AND_FINISH();
5666 IEM_MC_END();
5667}
5668
5669
5670/**
5671 * @opcode 0x9f
5672 */
5673FNIEMOP_DEF(iemOp_lahf)
5674{
5675 IEMOP_MNEMONIC(lahf, "lahf");
5676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5677 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
5678 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
5679 return IEMOP_RAISE_INVALID_OPCODE();
5680 IEM_MC_BEGIN(0, 1);
5681 IEM_MC_LOCAL(uint8_t, u8Flags);
5682 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
5683 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
5684 IEM_MC_ADVANCE_RIP_AND_FINISH();
5685 IEM_MC_END();
5686}
5687
5688
5689/**
5690 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
5691 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend off lock
5692 * prefixes. Will return on failures.
5693 * @param a_GCPtrMemOff The variable to store the offset in.
5694 */
5695#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
5696 do \
5697 { \
5698 switch (pVCpu->iem.s.enmEffAddrMode) \
5699 { \
5700 case IEMMODE_16BIT: \
5701 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
5702 break; \
5703 case IEMMODE_32BIT: \
5704 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
5705 break; \
5706 case IEMMODE_64BIT: \
5707 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
5708 break; \
5709 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5710 } \
5711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5712 } while (0)
5713
5714/**
5715 * @opcode 0xa0
5716 */
5717FNIEMOP_DEF(iemOp_mov_AL_Ob)
5718{
5719 /*
5720 * Get the offset and fend off lock prefixes.
5721 */
5722 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
5723 RTGCPTR GCPtrMemOff;
5724 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
5725
5726 /*
5727 * Fetch AL.
5728 */
5729 IEM_MC_BEGIN(0,1);
5730 IEM_MC_LOCAL(uint8_t, u8Tmp);
5731 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
5732 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
5733 IEM_MC_ADVANCE_RIP_AND_FINISH();
5734 IEM_MC_END();
5735}
5736
5737
5738/**
5739 * @opcode 0xa1
5740 */
5741FNIEMOP_DEF(iemOp_mov_rAX_Ov)
5742{
5743 /*
5744 * Get the offset and fend off lock prefixes.
5745 */
5746 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
5747 RTGCPTR GCPtrMemOff;
5748 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
5749
5750 /*
5751 * Fetch rAX.
5752 */
5753 switch (pVCpu->iem.s.enmEffOpSize)
5754 {
5755 case IEMMODE_16BIT:
5756 IEM_MC_BEGIN(0,1);
5757 IEM_MC_LOCAL(uint16_t, u16Tmp);
5758 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
5759 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
5760 IEM_MC_ADVANCE_RIP_AND_FINISH();
5761 IEM_MC_END();
5762 break;
5763
5764 case IEMMODE_32BIT:
5765 IEM_MC_BEGIN(0,1);
5766 IEM_MC_LOCAL(uint32_t, u32Tmp);
5767 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
5768 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
5769 IEM_MC_ADVANCE_RIP_AND_FINISH();
5770 IEM_MC_END();
5771 break;
5772
5773 case IEMMODE_64BIT:
5774 IEM_MC_BEGIN(0,1);
5775 IEM_MC_LOCAL(uint64_t, u64Tmp);
5776 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
5777 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
5778 IEM_MC_ADVANCE_RIP_AND_FINISH();
5779 IEM_MC_END();
5780 break;
5781
5782 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5783 }
5784}
5785
5786
5787/**
5788 * @opcode 0xa2
5789 */
5790FNIEMOP_DEF(iemOp_mov_Ob_AL)
5791{
5792 /*
5793 * Get the offset and fend off lock prefixes.
5794 */
5795 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
5796 RTGCPTR GCPtrMemOff;
5797 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
5798
5799 /*
5800 * Store AL.
5801 */
5802 IEM_MC_BEGIN(0,1);
5803 IEM_MC_LOCAL(uint8_t, u8Tmp);
5804 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
5805 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
5806 IEM_MC_ADVANCE_RIP_AND_FINISH();
5807 IEM_MC_END();
5808}
5809
5810
5811/**
5812 * @opcode 0xa3
5813 */
5814FNIEMOP_DEF(iemOp_mov_Ov_rAX)
5815{
5816 /*
5817 * Get the offset and fend off lock prefixes.
5818 */
5819 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
5820 RTGCPTR GCPtrMemOff;
5821 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
5822
5823 /*
5824 * Store rAX.
5825 */
5826 switch (pVCpu->iem.s.enmEffOpSize)
5827 {
5828 case IEMMODE_16BIT:
5829 IEM_MC_BEGIN(0,1);
5830 IEM_MC_LOCAL(uint16_t, u16Tmp);
5831 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
5832 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
5833 IEM_MC_ADVANCE_RIP_AND_FINISH();
5834 IEM_MC_END();
5835 break;
5836
5837 case IEMMODE_32BIT:
5838 IEM_MC_BEGIN(0,1);
5839 IEM_MC_LOCAL(uint32_t, u32Tmp);
5840 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
5841 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
5842 IEM_MC_ADVANCE_RIP_AND_FINISH();
5843 IEM_MC_END();
5844 break;
5845
5846 case IEMMODE_64BIT:
5847 IEM_MC_BEGIN(0,1);
5848 IEM_MC_LOCAL(uint64_t, u64Tmp);
5849 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
5850 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
5851 IEM_MC_ADVANCE_RIP_AND_FINISH();
5852 IEM_MC_END();
5853 break;
5854
5855 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5856 }
5857}
5858
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one non-repeated MOVS step: loads ValBits bits from the effective
 * segment at rSI, stores them to ES:rDI, then advances or retreats both
 * index registers by the value size depending on EFLAGS.DF. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR,           uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
5877
5878/**
5879 * @opcode 0xa4
5880 */
5881FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
5882{
5883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5884
5885 /*
5886 * Use the C implementation if a repeat prefix is encountered.
5887 */
5888 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5889 {
5890 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
5891 switch (pVCpu->iem.s.enmEffAddrMode)
5892 {
5893 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
5894 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
5895 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
5896 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5897 }
5898 }
5899 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
5900
5901 /*
5902 * Sharing case implementation with movs[wdq] below.
5903 */
5904 switch (pVCpu->iem.s.enmEffAddrMode)
5905 {
5906 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
5907 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
5908 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
5909 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5910 }
5911}
5912
5913
5914/**
5915 * @opcode 0xa5
5916 */
5917FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
5918{
5919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5920
5921 /*
5922 * Use the C implementation if a repeat prefix is encountered.
5923 */
5924 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5925 {
5926 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
5927 switch (pVCpu->iem.s.enmEffOpSize)
5928 {
5929 case IEMMODE_16BIT:
5930 switch (pVCpu->iem.s.enmEffAddrMode)
5931 {
5932 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
5933 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
5934 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
5935 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5936 }
5937 break;
5938 case IEMMODE_32BIT:
5939 switch (pVCpu->iem.s.enmEffAddrMode)
5940 {
5941 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
5942 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
5943 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
5944 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5945 }
5946 case IEMMODE_64BIT:
5947 switch (pVCpu->iem.s.enmEffAddrMode)
5948 {
5949 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
5950 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
5951 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
5952 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5953 }
5954 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5955 }
5956 }
5957 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
5958
5959 /*
5960 * Annoying double switch here.
5961 * Using ugly macro for implementing the cases, sharing it with movsb.
5962 */
5963 switch (pVCpu->iem.s.enmEffOpSize)
5964 {
5965 case IEMMODE_16BIT:
5966 switch (pVCpu->iem.s.enmEffAddrMode)
5967 {
5968 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
5969 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
5970 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
5971 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5972 }
5973 break;
5974
5975 case IEMMODE_32BIT:
5976 switch (pVCpu->iem.s.enmEffAddrMode)
5977 {
5978 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
5979 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
5980 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
5981 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5982 }
5983 break;
5984
5985 case IEMMODE_64BIT:
5986 switch (pVCpu->iem.s.enmEffAddrMode)
5987 {
5988 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5989 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
5990 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
5991 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5992 }
5993 break;
5994 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5995 }
5996}
5997
5998#undef IEM_MOVS_CASE
5999
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one non-repeated CMPS step: loads ValBits bits from the effective
 * segment at rSI and from ES:rDI, compares them via iemAImpl_cmp (updating
 * EFLAGS), then advances or retreats both index registers by the value size
 * depending on EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 3); \
    IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
    IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
    IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_LOCAL(RTGCPTR,           uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
    IEM_MC_REF_LOCAL(puValue1, uValue1); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
6026
6027/**
6028 * @opcode 0xa6
6029 */
6030FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
6031{
6032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6033
6034 /*
6035 * Use the C implementation if a repeat prefix is encountered.
6036 */
6037 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6038 {
6039 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
6040 switch (pVCpu->iem.s.enmEffAddrMode)
6041 {
6042 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6043 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6044 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6045 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6046 }
6047 }
6048 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6049 {
6050 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
6051 switch (pVCpu->iem.s.enmEffAddrMode)
6052 {
6053 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6054 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6055 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6056 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6057 }
6058 }
6059 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
6060
6061 /*
6062 * Sharing case implementation with cmps[wdq] below.
6063 */
6064 switch (pVCpu->iem.s.enmEffAddrMode)
6065 {
6066 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
6067 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
6068 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
6069 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6070 }
6071}
6072
6073
6074/**
6075 * @opcode 0xa7
6076 */
6077FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
6078{
6079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6080
6081 /*
6082 * Use the C implementation if a repeat prefix is encountered.
6083 */
6084 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6085 {
6086 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
6087 switch (pVCpu->iem.s.enmEffOpSize)
6088 {
6089 case IEMMODE_16BIT:
6090 switch (pVCpu->iem.s.enmEffAddrMode)
6091 {
6092 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6093 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6094 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6095 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6096 }
6097 break;
6098 case IEMMODE_32BIT:
6099 switch (pVCpu->iem.s.enmEffAddrMode)
6100 {
6101 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6102 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6103 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6104 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6105 }
6106 case IEMMODE_64BIT:
6107 switch (pVCpu->iem.s.enmEffAddrMode)
6108 {
6109 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
6110 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6111 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6112 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6113 }
6114 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6115 }
6116 }
6117
6118 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6119 {
6120 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
6121 switch (pVCpu->iem.s.enmEffOpSize)
6122 {
6123 case IEMMODE_16BIT:
6124 switch (pVCpu->iem.s.enmEffAddrMode)
6125 {
6126 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6127 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6128 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6129 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6130 }
6131 break;
6132 case IEMMODE_32BIT:
6133 switch (pVCpu->iem.s.enmEffAddrMode)
6134 {
6135 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6136 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6137 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6138 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6139 }
6140 case IEMMODE_64BIT:
6141 switch (pVCpu->iem.s.enmEffAddrMode)
6142 {
6143 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
6144 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6145 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6146 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6147 }
6148 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6149 }
6150 }
6151
6152 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
6153
6154 /*
6155 * Annoying double switch here.
6156 * Using ugly macro for implementing the cases, sharing it with cmpsb.
6157 */
6158 switch (pVCpu->iem.s.enmEffOpSize)
6159 {
6160 case IEMMODE_16BIT:
6161 switch (pVCpu->iem.s.enmEffAddrMode)
6162 {
6163 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
6164 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
6165 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
6166 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6167 }
6168 break;
6169
6170 case IEMMODE_32BIT:
6171 switch (pVCpu->iem.s.enmEffAddrMode)
6172 {
6173 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
6174 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
6175 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
6176 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6177 }
6178 break;
6179
6180 case IEMMODE_64BIT:
6181 switch (pVCpu->iem.s.enmEffAddrMode)
6182 {
6183 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6184 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
6185 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
6186 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6187 }
6188 break;
6189 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6190 }
6191}
6192
6193#undef IEM_CMPS_CASE
6194
6195/**
6196 * @opcode 0xa8
6197 */
6198FNIEMOP_DEF(iemOp_test_AL_Ib)
6199{
6200 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
6201 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6202 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
6203}
6204
6205
6206/**
6207 * @opcode 0xa9
6208 */
6209FNIEMOP_DEF(iemOp_test_eAX_Iz)
6210{
6211 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
6212 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6213 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
6214}
6215
6216
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the microcode for one (non-REP) STOS iteration: fetches the low
 * ValBits bits of rAX, stores them at ES:[rDI], then steps rDI by
 * ValBits/8 bytes - downwards when EFLAGS.DF is set, upwards otherwise.
 * AddrBits selects how much of rDI participates in addressing
 * (zero-extended to 64 bits for the access). */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6232
6233/**
6234 * @opcode 0xaa
6235 */
6236FNIEMOP_DEF(iemOp_stosb_Yb_AL)
6237{
6238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6239
6240 /*
6241 * Use the C implementation if a repeat prefix is encountered.
6242 */
6243 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6244 {
6245 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
6246 switch (pVCpu->iem.s.enmEffAddrMode)
6247 {
6248 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
6249 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
6250 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
6251 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6252 }
6253 }
6254 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
6255
6256 /*
6257 * Sharing case implementation with stos[wdq] below.
6258 */
6259 switch (pVCpu->iem.s.enmEffAddrMode)
6260 {
6261 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
6262 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
6263 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
6264 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6265 }
6266}
6267
6268
6269/**
6270 * @opcode 0xab
6271 */
6272FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
6273{
6274 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6275
6276 /*
6277 * Use the C implementation if a repeat prefix is encountered.
6278 */
6279 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6280 {
6281 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
6282 switch (pVCpu->iem.s.enmEffOpSize)
6283 {
6284 case IEMMODE_16BIT:
6285 switch (pVCpu->iem.s.enmEffAddrMode)
6286 {
6287 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
6288 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
6289 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
6290 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6291 }
6292 break;
6293 case IEMMODE_32BIT:
6294 switch (pVCpu->iem.s.enmEffAddrMode)
6295 {
6296 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
6297 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
6298 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
6299 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6300 }
6301 case IEMMODE_64BIT:
6302 switch (pVCpu->iem.s.enmEffAddrMode)
6303 {
6304 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
6305 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
6306 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
6307 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6308 }
6309 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6310 }
6311 }
6312 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
6313
6314 /*
6315 * Annoying double switch here.
6316 * Using ugly macro for implementing the cases, sharing it with stosb.
6317 */
6318 switch (pVCpu->iem.s.enmEffOpSize)
6319 {
6320 case IEMMODE_16BIT:
6321 switch (pVCpu->iem.s.enmEffAddrMode)
6322 {
6323 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
6324 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
6325 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
6326 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6327 }
6328 break;
6329
6330 case IEMMODE_32BIT:
6331 switch (pVCpu->iem.s.enmEffAddrMode)
6332 {
6333 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
6334 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
6335 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
6336 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6337 }
6338 break;
6339
6340 case IEMMODE_64BIT:
6341 switch (pVCpu->iem.s.enmEffAddrMode)
6342 {
6343 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6344 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
6345 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
6346 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6347 }
6348 break;
6349 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6350 }
6351}
6352
6353#undef IEM_STOS_CASE
6354
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the microcode for one (non-REP) LODS iteration: loads ValBits
 * bits from iEffSeg:[rSI] (segment overrides are honoured via iEffSeg)
 * into rAX, then steps rSI by ValBits/8 bytes according to EFLAGS.DF.
 * AddrBits selects how much of rSI is used for addressing
 * (zero-extended to 64 bits for the access). */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6370
6371/**
6372 * @opcode 0xac
6373 */
6374FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
6375{
6376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6377
6378 /*
6379 * Use the C implementation if a repeat prefix is encountered.
6380 */
6381 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6382 {
6383 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
6384 switch (pVCpu->iem.s.enmEffAddrMode)
6385 {
6386 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
6387 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
6388 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
6389 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6390 }
6391 }
6392 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
6393
6394 /*
6395 * Sharing case implementation with stos[wdq] below.
6396 */
6397 switch (pVCpu->iem.s.enmEffAddrMode)
6398 {
6399 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
6400 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
6401 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
6402 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6403 }
6404}
6405
6406
6407/**
6408 * @opcode 0xad
6409 */
6410FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
6411{
6412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6413
6414 /*
6415 * Use the C implementation if a repeat prefix is encountered.
6416 */
6417 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6418 {
6419 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
6420 switch (pVCpu->iem.s.enmEffOpSize)
6421 {
6422 case IEMMODE_16BIT:
6423 switch (pVCpu->iem.s.enmEffAddrMode)
6424 {
6425 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
6426 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
6427 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
6428 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6429 }
6430 break;
6431 case IEMMODE_32BIT:
6432 switch (pVCpu->iem.s.enmEffAddrMode)
6433 {
6434 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
6435 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
6436 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
6437 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6438 }
6439 case IEMMODE_64BIT:
6440 switch (pVCpu->iem.s.enmEffAddrMode)
6441 {
6442 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
6443 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
6444 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
6445 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6446 }
6447 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6448 }
6449 }
6450 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
6451
6452 /*
6453 * Annoying double switch here.
6454 * Using ugly macro for implementing the cases, sharing it with lodsb.
6455 */
6456 switch (pVCpu->iem.s.enmEffOpSize)
6457 {
6458 case IEMMODE_16BIT:
6459 switch (pVCpu->iem.s.enmEffAddrMode)
6460 {
6461 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
6462 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
6463 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
6464 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6465 }
6466 break;
6467
6468 case IEMMODE_32BIT:
6469 switch (pVCpu->iem.s.enmEffAddrMode)
6470 {
6471 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
6472 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
6473 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
6474 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6475 }
6476 break;
6477
6478 case IEMMODE_64BIT:
6479 switch (pVCpu->iem.s.enmEffAddrMode)
6480 {
6481 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6482 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
6483 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
6484 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6485 }
6486 break;
6487 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6488 }
6489}
6490
6491#undef IEM_LODS_CASE
6492
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the microcode for one (non-REP) SCAS iteration: compares the
 * low ValBits bits of rAX against the value at ES:[rDI] via the CMP
 * worker (which only updates EFLAGS), then steps rDI by ValBits/8
 * bytes according to EFLAGS.DF.  AddrBits selects how much of rDI is
 * used for addressing (zero-extended to 64 bits for the access). */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END();
6514
6515/**
6516 * @opcode 0xae
6517 */
6518FNIEMOP_DEF(iemOp_scasb_AL_Xb)
6519{
6520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6521
6522 /*
6523 * Use the C implementation if a repeat prefix is encountered.
6524 */
6525 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6526 {
6527 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
6528 switch (pVCpu->iem.s.enmEffAddrMode)
6529 {
6530 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
6531 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
6532 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
6533 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6534 }
6535 }
6536 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6537 {
6538 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
6539 switch (pVCpu->iem.s.enmEffAddrMode)
6540 {
6541 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
6542 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
6543 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
6544 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6545 }
6546 }
6547 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
6548
6549 /*
6550 * Sharing case implementation with stos[wdq] below.
6551 */
6552 switch (pVCpu->iem.s.enmEffAddrMode)
6553 {
6554 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
6555 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
6556 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
6557 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6558 }
6559}
6560
6561
6562/**
6563 * @opcode 0xaf
6564 */
6565FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
6566{
6567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6568
6569 /*
6570 * Use the C implementation if a repeat prefix is encountered.
6571 */
6572 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6573 {
6574 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
6575 switch (pVCpu->iem.s.enmEffOpSize)
6576 {
6577 case IEMMODE_16BIT:
6578 switch (pVCpu->iem.s.enmEffAddrMode)
6579 {
6580 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
6581 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
6582 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
6583 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6584 }
6585 break;
6586 case IEMMODE_32BIT:
6587 switch (pVCpu->iem.s.enmEffAddrMode)
6588 {
6589 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
6590 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
6591 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
6592 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6593 }
6594 case IEMMODE_64BIT:
6595 switch (pVCpu->iem.s.enmEffAddrMode)
6596 {
6597 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
6598 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
6599 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
6600 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6601 }
6602 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6603 }
6604 }
6605 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6606 {
6607 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
6608 switch (pVCpu->iem.s.enmEffOpSize)
6609 {
6610 case IEMMODE_16BIT:
6611 switch (pVCpu->iem.s.enmEffAddrMode)
6612 {
6613 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
6614 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
6615 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
6616 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6617 }
6618 break;
6619 case IEMMODE_32BIT:
6620 switch (pVCpu->iem.s.enmEffAddrMode)
6621 {
6622 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
6623 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
6624 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
6625 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6626 }
6627 case IEMMODE_64BIT:
6628 switch (pVCpu->iem.s.enmEffAddrMode)
6629 {
6630 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
6631 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
6632 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
6633 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6634 }
6635 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6636 }
6637 }
6638 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
6639
6640 /*
6641 * Annoying double switch here.
6642 * Using ugly macro for implementing the cases, sharing it with scasb.
6643 */
6644 switch (pVCpu->iem.s.enmEffOpSize)
6645 {
6646 case IEMMODE_16BIT:
6647 switch (pVCpu->iem.s.enmEffAddrMode)
6648 {
6649 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
6650 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
6651 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
6652 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6653 }
6654 break;
6655
6656 case IEMMODE_32BIT:
6657 switch (pVCpu->iem.s.enmEffAddrMode)
6658 {
6659 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
6660 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
6661 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
6662 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6663 }
6664 break;
6665
6666 case IEMMODE_64BIT:
6667 switch (pVCpu->iem.s.enmEffAddrMode)
6668 {
6669 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6670 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
6671 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
6672 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6673 }
6674 break;
6675 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6676 }
6677}
6678
6679#undef IEM_SCAS_CASE
6680
6681/**
6682 * Common 'mov r8, imm8' helper.
6683 */
6684FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
6685{
6686 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
6687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6688
6689 IEM_MC_BEGIN(0, 1);
6690 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
6691 IEM_MC_STORE_GREG_U8(iReg, u8Value);
6692 IEM_MC_ADVANCE_RIP_AND_FINISH();
6693 IEM_MC_END();
6694}
6695
6696
6697/**
6698 * @opcode 0xb0
6699 */
6700FNIEMOP_DEF(iemOp_mov_AL_Ib)
6701{
6702 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
6703 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
6704}
6705
6706
6707/**
6708 * @opcode 0xb1
6709 */
6710FNIEMOP_DEF(iemOp_CL_Ib)
6711{
6712 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
6713 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
6714}
6715
6716
6717/**
6718 * @opcode 0xb2
6719 */
6720FNIEMOP_DEF(iemOp_DL_Ib)
6721{
6722 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
6723 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
6724}
6725
6726
6727/**
6728 * @opcode 0xb3
6729 */
6730FNIEMOP_DEF(iemOp_BL_Ib)
6731{
6732 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
6733 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
6734}
6735
6736
6737/**
6738 * @opcode 0xb4
6739 */
6740FNIEMOP_DEF(iemOp_mov_AH_Ib)
6741{
6742 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
6743 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
6744}
6745
6746
6747/**
6748 * @opcode 0xb5
6749 */
6750FNIEMOP_DEF(iemOp_CH_Ib)
6751{
6752 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
6753 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
6754}
6755
6756
6757/**
6758 * @opcode 0xb6
6759 */
6760FNIEMOP_DEF(iemOp_DH_Ib)
6761{
6762 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
6763 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
6764}
6765
6766
6767/**
6768 * @opcode 0xb7
6769 */
6770FNIEMOP_DEF(iemOp_BH_Ib)
6771{
6772 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
6773 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
6774}
6775
6776
6777/**
6778 * Common 'mov regX,immX' helper.
6779 */
6780FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
6781{
6782 switch (pVCpu->iem.s.enmEffOpSize)
6783 {
6784 case IEMMODE_16BIT:
6785 {
6786 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6788
6789 IEM_MC_BEGIN(0, 1);
6790 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
6791 IEM_MC_STORE_GREG_U16(iReg, u16Value);
6792 IEM_MC_ADVANCE_RIP_AND_FINISH();
6793 IEM_MC_END();
6794 break;
6795 }
6796
6797 case IEMMODE_32BIT:
6798 {
6799 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
6800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6801
6802 IEM_MC_BEGIN(0, 1);
6803 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
6804 IEM_MC_STORE_GREG_U32(iReg, u32Value);
6805 IEM_MC_ADVANCE_RIP_AND_FINISH();
6806 IEM_MC_END();
6807 break;
6808 }
6809 case IEMMODE_64BIT:
6810 {
6811 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
6812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6813
6814 IEM_MC_BEGIN(0, 1);
6815 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
6816 IEM_MC_STORE_GREG_U64(iReg, u64Value);
6817 IEM_MC_ADVANCE_RIP_AND_FINISH();
6818 IEM_MC_END();
6819 break;
6820 }
6821 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6822 }
6823}
6824
6825
6826/**
6827 * @opcode 0xb8
6828 */
6829FNIEMOP_DEF(iemOp_eAX_Iv)
6830{
6831 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
6832 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
6833}
6834
6835
6836/**
6837 * @opcode 0xb9
6838 */
6839FNIEMOP_DEF(iemOp_eCX_Iv)
6840{
6841 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
6842 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
6843}
6844
6845
6846/**
6847 * @opcode 0xba
6848 */
6849FNIEMOP_DEF(iemOp_eDX_Iv)
6850{
6851 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
6852 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
6853}
6854
6855
6856/**
6857 * @opcode 0xbb
6858 */
6859FNIEMOP_DEF(iemOp_eBX_Iv)
6860{
6861 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
6862 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
6863}
6864
6865
6866/**
6867 * @opcode 0xbc
6868 */
6869FNIEMOP_DEF(iemOp_eSP_Iv)
6870{
6871 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
6872 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
6873}
6874
6875
6876/**
6877 * @opcode 0xbd
6878 */
6879FNIEMOP_DEF(iemOp_eBP_Iv)
6880{
6881 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
6882 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
6883}
6884
6885
6886/**
6887 * @opcode 0xbe
6888 */
6889FNIEMOP_DEF(iemOp_eSI_Iv)
6890{
6891 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
6892 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
6893}
6894
6895
6896/**
6897 * @opcode 0xbf
6898 */
6899FNIEMOP_DEF(iemOp_eDI_Iv)
6900{
6901 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
6902 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
6903}
6904
6905
6906/**
6907 * @opcode 0xc0
6908 */
6909FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
6910{
6911 IEMOP_HLP_MIN_186();
6912 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6913 PCIEMOPSHIFTSIZES pImpl;
6914 switch (IEM_GET_MODRM_REG_8(bRm))
6915 {
6916 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
6917 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
6918 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
6919 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
6920 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
6921 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
6922 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
6923 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6924 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
6925 }
6926 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6927
6928 if (IEM_IS_MODRM_REG_MODE(bRm))
6929 {
6930 /* register */
6931 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6933 IEM_MC_BEGIN(3, 0);
6934 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6935 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6936 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6937 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6938 IEM_MC_REF_EFLAGS(pEFlags);
6939 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6940 IEM_MC_ADVANCE_RIP_AND_FINISH();
6941 IEM_MC_END();
6942 }
6943 else
6944 {
6945 /* memory */
6946 IEM_MC_BEGIN(3, 2);
6947 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6948 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6949 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6950 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6951
6952 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6953 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6954 IEM_MC_ASSIGN(cShiftArg, cShift);
6955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6956 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6957 IEM_MC_FETCH_EFLAGS(EFlags);
6958 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6959
6960 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6961 IEM_MC_COMMIT_EFLAGS(EFlags);
6962 IEM_MC_ADVANCE_RIP_AND_FINISH();
6963 IEM_MC_END();
6964 }
6965}
6966
6967
6968/**
6969 * @opcode 0xc1
6970 */
6971FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
6972{
6973 IEMOP_HLP_MIN_186();
6974 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6975 PCIEMOPSHIFTSIZES pImpl;
6976 switch (IEM_GET_MODRM_REG_8(bRm))
6977 {
6978 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
6979 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
6980 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
6981 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
6982 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
6983 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
6984 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
6985 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6986 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
6987 }
6988 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6989
6990 if (IEM_IS_MODRM_REG_MODE(bRm))
6991 {
6992 /* register */
6993 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6995 switch (pVCpu->iem.s.enmEffOpSize)
6996 {
6997 case IEMMODE_16BIT:
6998 IEM_MC_BEGIN(3, 0);
6999 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7000 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7001 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7002 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7003 IEM_MC_REF_EFLAGS(pEFlags);
7004 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7005 IEM_MC_ADVANCE_RIP_AND_FINISH();
7006 IEM_MC_END();
7007 break;
7008
7009 case IEMMODE_32BIT:
7010 IEM_MC_BEGIN(3, 0);
7011 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7012 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7013 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7014 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7015 IEM_MC_REF_EFLAGS(pEFlags);
7016 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7017 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7018 IEM_MC_ADVANCE_RIP_AND_FINISH();
7019 IEM_MC_END();
7020 break;
7021
7022 case IEMMODE_64BIT:
7023 IEM_MC_BEGIN(3, 0);
7024 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7025 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7026 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7027 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7028 IEM_MC_REF_EFLAGS(pEFlags);
7029 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7030 IEM_MC_ADVANCE_RIP_AND_FINISH();
7031 IEM_MC_END();
7032 break;
7033
7034 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7035 }
7036 }
7037 else
7038 {
7039 /* memory */
7040 switch (pVCpu->iem.s.enmEffOpSize)
7041 {
7042 case IEMMODE_16BIT:
7043 IEM_MC_BEGIN(3, 2);
7044 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7045 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7046 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7047 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7048
7049 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7050 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7051 IEM_MC_ASSIGN(cShiftArg, cShift);
7052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7053 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7054 IEM_MC_FETCH_EFLAGS(EFlags);
7055 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7056
7057 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7058 IEM_MC_COMMIT_EFLAGS(EFlags);
7059 IEM_MC_ADVANCE_RIP_AND_FINISH();
7060 IEM_MC_END();
7061 break;
7062
7063 case IEMMODE_32BIT:
7064 IEM_MC_BEGIN(3, 2);
7065 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7066 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7067 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7068 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7069
7070 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7071 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7072 IEM_MC_ASSIGN(cShiftArg, cShift);
7073 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7074 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7075 IEM_MC_FETCH_EFLAGS(EFlags);
7076 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7077
7078 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7079 IEM_MC_COMMIT_EFLAGS(EFlags);
7080 IEM_MC_ADVANCE_RIP_AND_FINISH();
7081 IEM_MC_END();
7082 break;
7083
7084 case IEMMODE_64BIT:
7085 IEM_MC_BEGIN(3, 2);
7086 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7087 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7088 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7089 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7090
7091 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7092 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7093 IEM_MC_ASSIGN(cShiftArg, cShift);
7094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7095 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7096 IEM_MC_FETCH_EFLAGS(EFlags);
7097 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7098
7099 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7100 IEM_MC_COMMIT_EFLAGS(EFlags);
7101 IEM_MC_ADVANCE_RIP_AND_FINISH();
7102 IEM_MC_END();
7103 break;
7104
7105 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7106 }
7107 }
7108}
7109
7110
7111/**
7112 * @opcode 0xc2
7113 */
7114FNIEMOP_DEF(iemOp_retn_Iw)
7115{
7116 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
7117 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7118 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7120 switch (pVCpu->iem.s.enmEffOpSize)
7121 {
7122 case IEMMODE_16BIT:
7123 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_retn_iw_16, u16Imm);
7124 case IEMMODE_32BIT:
7125 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_retn_iw_32, u16Imm);
7126 case IEMMODE_64BIT:
7127 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_retn_iw_64, u16Imm);
7128 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7129 }
7130}
7131
7132
7133/**
7134 * @opcode 0xc3
7135 */
7136FNIEMOP_DEF(iemOp_retn)
7137{
7138 IEMOP_MNEMONIC(retn, "retn");
7139 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7141 switch (pVCpu->iem.s.enmEffOpSize)
7142 {
7143 case IEMMODE_16BIT:
7144 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_retn_16);
7145 case IEMMODE_32BIT:
7146 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_retn_32);
7147 case IEMMODE_64BIT:
7148 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_retn_64);
7149 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7150 }
7151}
7152
7153
7154/**
7155 * @opcode 0xc4
7156 */
7157FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
7158{
7159 /* The LDS instruction is invalid 64-bit mode. In legacy and
7160 compatability mode it is invalid with MOD=3.
7161 The use as a VEX prefix is made possible by assigning the inverted
7162 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
7163 outside of 64-bit mode. VEX is not available in real or v86 mode. */
7164 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7165 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
7166 || IEM_IS_MODRM_REG_MODE(bRm) )
7167 {
7168 IEMOP_MNEMONIC(vex3_prefix, "vex3");
7169 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
7170 {
7171 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
7172 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
7173 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
7174 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
7175 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
7176 if ((bVex2 & 0x80 /* VEX.W */) && pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
7177 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
7178 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
7179 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
7180 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
7181 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
7182 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
7183 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
7184
7185 switch (bRm & 0x1f)
7186 {
7187 case 1: /* 0x0f lead opcode byte. */
7188#ifdef IEM_WITH_VEX
7189 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7190#else
7191 IEMOP_BITCH_ABOUT_STUB();
7192 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7193#endif
7194
7195 case 2: /* 0x0f 0x38 lead opcode bytes. */
7196#ifdef IEM_WITH_VEX
7197 return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7198#else
7199 IEMOP_BITCH_ABOUT_STUB();
7200 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7201#endif
7202
7203 case 3: /* 0x0f 0x3a lead opcode bytes. */
7204#ifdef IEM_WITH_VEX
7205 return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7206#else
7207 IEMOP_BITCH_ABOUT_STUB();
7208 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7209#endif
7210
7211 default:
7212 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
7213 return IEMOP_RAISE_INVALID_OPCODE();
7214 }
7215 }
7216 Log(("VEX3: AVX support disabled!\n"));
7217 return IEMOP_RAISE_INVALID_OPCODE();
7218 }
7219
7220 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
7221 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
7222}
7223
7224
7225/**
7226 * @opcode 0xc5
7227 */
7228FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
7229{
7230 /* The LES instruction is invalid 64-bit mode. In legacy and
7231 compatability mode it is invalid with MOD=3.
7232 The use as a VEX prefix is made possible by assigning the inverted
7233 REX.R to the top MOD bit, and the top bit in the inverted register
7234 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
7235 to accessing registers 0..7 in this VEX form. */
7236 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7237 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
7238 || IEM_IS_MODRM_REG_MODE(bRm))
7239 {
7240 IEMOP_MNEMONIC(vex2_prefix, "vex2");
7241 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
7242 {
7243 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
7244 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
7245 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
7246 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
7247 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
7248 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
7249 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
7250 pVCpu->iem.s.idxPrefix = bRm & 0x3;
7251
7252#ifdef IEM_WITH_VEX
7253 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7254#else
7255 IEMOP_BITCH_ABOUT_STUB();
7256 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7257#endif
7258 }
7259
7260 /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
7261 Log(("VEX2: AVX support disabled!\n"));
7262 return IEMOP_RAISE_INVALID_OPCODE();
7263 }
7264
7265 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
7266 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
7267}
7268
7269
7270/**
7271 * @opcode 0xc6
7272 */
7273FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
7274{
7275 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7276 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
7277 return IEMOP_RAISE_INVALID_OPCODE();
7278 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
7279
7280 if (IEM_IS_MODRM_REG_MODE(bRm))
7281 {
7282 /* register access */
7283 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7285 IEM_MC_BEGIN(0, 0);
7286 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
7287 IEM_MC_ADVANCE_RIP_AND_FINISH();
7288 IEM_MC_END();
7289 }
7290 else
7291 {
7292 /* memory access. */
7293 IEM_MC_BEGIN(0, 1);
7294 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7295 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7296 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7297 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7298 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
7299 IEM_MC_ADVANCE_RIP_AND_FINISH();
7300 IEM_MC_END();
7301 }
7302}
7303
7304
7305/**
7306 * @opcode 0xc7
7307 */
7308FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
7309{
7310 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7311 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
7312 return IEMOP_RAISE_INVALID_OPCODE();
7313 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
7314
7315 if (IEM_IS_MODRM_REG_MODE(bRm))
7316 {
7317 /* register access */
7318 switch (pVCpu->iem.s.enmEffOpSize)
7319 {
7320 case IEMMODE_16BIT:
7321 IEM_MC_BEGIN(0, 0);
7322 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7324 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
7325 IEM_MC_ADVANCE_RIP_AND_FINISH();
7326 IEM_MC_END();
7327 break;
7328
7329 case IEMMODE_32BIT:
7330 IEM_MC_BEGIN(0, 0);
7331 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
7332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7333 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
7334 IEM_MC_ADVANCE_RIP_AND_FINISH();
7335 IEM_MC_END();
7336 break;
7337
7338 case IEMMODE_64BIT:
7339 IEM_MC_BEGIN(0, 0);
7340 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
7341 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7342 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
7343 IEM_MC_ADVANCE_RIP_AND_FINISH();
7344 IEM_MC_END();
7345 break;
7346
7347 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7348 }
7349 }
7350 else
7351 {
7352 /* memory access. */
7353 switch (pVCpu->iem.s.enmEffOpSize)
7354 {
7355 case IEMMODE_16BIT:
7356 IEM_MC_BEGIN(0, 1);
7357 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7358 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
7359 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7361 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
7362 IEM_MC_ADVANCE_RIP_AND_FINISH();
7363 IEM_MC_END();
7364 break;
7365
7366 case IEMMODE_32BIT:
7367 IEM_MC_BEGIN(0, 1);
7368 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7369 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
7370 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
7371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7372 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
7373 IEM_MC_ADVANCE_RIP_AND_FINISH();
7374 IEM_MC_END();
7375 break;
7376
7377 case IEMMODE_64BIT:
7378 IEM_MC_BEGIN(0, 1);
7379 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7380 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
7381 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
7382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7383 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
7384 IEM_MC_ADVANCE_RIP_AND_FINISH();
7385 IEM_MC_END();
7386 break;
7387
7388 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7389 }
7390 }
7391}
7392
7393
7394
7395
7396/**
7397 * @opcode 0xc8
7398 */
7399FNIEMOP_DEF(iemOp_enter_Iw_Ib)
7400{
7401 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
7402 IEMOP_HLP_MIN_186();
7403 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7404 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
7405 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
7406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7407 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
7408}
7409
7410
7411/**
7412 * @opcode 0xc9
7413 */
7414FNIEMOP_DEF(iemOp_leave)
7415{
7416 IEMOP_MNEMONIC(leave, "leave");
7417 IEMOP_HLP_MIN_186();
7418 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7420 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
7421}
7422
7423
7424/**
7425 * @opcode 0xca
7426 */
7427FNIEMOP_DEF(iemOp_retf_Iw)
7428{
7429 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
7430 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7432 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
7433}
7434
7435
7436/**
7437 * @opcode 0xcb
7438 */
7439FNIEMOP_DEF(iemOp_retf)
7440{
7441 IEMOP_MNEMONIC(retf, "retf");
7442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7443 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
7444}
7445
7446
7447/**
7448 * @opcode 0xcc
7449 */
7450FNIEMOP_DEF(iemOp_int3)
7451{
7452 IEMOP_MNEMONIC(int3, "int3");
7453 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7454 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
7455}
7456
7457
7458/**
7459 * @opcode 0xcd
7460 */
7461FNIEMOP_DEF(iemOp_int_Ib)
7462{
7463 IEMOP_MNEMONIC(int_Ib, "int Ib");
7464 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
7465 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7466 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, IEMINT_INTN);
7467}
7468
7469
7470/**
7471 * @opcode 0xce
7472 */
7473FNIEMOP_DEF(iemOp_into)
7474{
7475 IEMOP_MNEMONIC(into, "into");
7476 IEMOP_HLP_NO_64BIT();
7477
7478 IEM_MC_BEGIN(2, 0);
7479 IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
7480 IEM_MC_ARG_CONST(IEMINT, enmInt, /*=*/ IEMINT_INTO, 1);
7481 IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, enmInt);
7482 IEM_MC_END();
7483}
7484
7485
7486/**
7487 * @opcode 0xcf
7488 */
7489FNIEMOP_DEF(iemOp_iret)
7490{
7491 IEMOP_MNEMONIC(iret, "iret");
7492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7493 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
7494}
7495
7496
7497/**
7498 * @opcode 0xd0
7499 */
7500FNIEMOP_DEF(iemOp_Grp2_Eb_1)
7501{
7502 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7503 PCIEMOPSHIFTSIZES pImpl;
7504 switch (IEM_GET_MODRM_REG_8(bRm))
7505 {
7506 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
7507 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
7508 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
7509 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
7510 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
7511 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
7512 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
7513 case 6: return IEMOP_RAISE_INVALID_OPCODE();
7514 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
7515 }
7516 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7517
7518 if (IEM_IS_MODRM_REG_MODE(bRm))
7519 {
7520 /* register */
7521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7522 IEM_MC_BEGIN(3, 0);
7523 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7524 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
7525 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7526 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7527 IEM_MC_REF_EFLAGS(pEFlags);
7528 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7529 IEM_MC_ADVANCE_RIP_AND_FINISH();
7530 IEM_MC_END();
7531 }
7532 else
7533 {
7534 /* memory */
7535 IEM_MC_BEGIN(3, 2);
7536 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7537 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
7538 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7539 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7540
7541 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7543 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7544 IEM_MC_FETCH_EFLAGS(EFlags);
7545 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7546
7547 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7548 IEM_MC_COMMIT_EFLAGS(EFlags);
7549 IEM_MC_ADVANCE_RIP_AND_FINISH();
7550 IEM_MC_END();
7551 }
7552}
7553
7554
7555
7556/**
7557 * @opcode 0xd1
7558 */
7559FNIEMOP_DEF(iemOp_Grp2_Ev_1)
7560{
7561 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7562 PCIEMOPSHIFTSIZES pImpl;
7563 switch (IEM_GET_MODRM_REG_8(bRm))
7564 {
7565 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
7566 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
7567 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
7568 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
7569 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
7570 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
7571 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
7572 case 6: return IEMOP_RAISE_INVALID_OPCODE();
7573 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
7574 }
7575 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7576
7577 if (IEM_IS_MODRM_REG_MODE(bRm))
7578 {
7579 /* register */
7580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7581 switch (pVCpu->iem.s.enmEffOpSize)
7582 {
7583 case IEMMODE_16BIT:
7584 IEM_MC_BEGIN(3, 0);
7585 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7586 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
7587 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7588 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7589 IEM_MC_REF_EFLAGS(pEFlags);
7590 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7591 IEM_MC_ADVANCE_RIP_AND_FINISH();
7592 IEM_MC_END();
7593 break;
7594
7595 case IEMMODE_32BIT:
7596 IEM_MC_BEGIN(3, 0);
7597 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7598 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
7599 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7600 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7601 IEM_MC_REF_EFLAGS(pEFlags);
7602 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7603 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7604 IEM_MC_ADVANCE_RIP_AND_FINISH();
7605 IEM_MC_END();
7606 break;
7607
7608 case IEMMODE_64BIT:
7609 IEM_MC_BEGIN(3, 0);
7610 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7611 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
7612 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7613 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7614 IEM_MC_REF_EFLAGS(pEFlags);
7615 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7616 IEM_MC_ADVANCE_RIP_AND_FINISH();
7617 IEM_MC_END();
7618 break;
7619
7620 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7621 }
7622 }
7623 else
7624 {
7625 /* memory */
7626 switch (pVCpu->iem.s.enmEffOpSize)
7627 {
7628 case IEMMODE_16BIT:
7629 IEM_MC_BEGIN(3, 2);
7630 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7631 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
7632 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7633 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7634
7635 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7636 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7637 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7638 IEM_MC_FETCH_EFLAGS(EFlags);
7639 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7640
7641 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7642 IEM_MC_COMMIT_EFLAGS(EFlags);
7643 IEM_MC_ADVANCE_RIP_AND_FINISH();
7644 IEM_MC_END();
7645 break;
7646
7647 case IEMMODE_32BIT:
7648 IEM_MC_BEGIN(3, 2);
7649 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7650 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
7651 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7652 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7653
7654 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7656 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7657 IEM_MC_FETCH_EFLAGS(EFlags);
7658 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7659
7660 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7661 IEM_MC_COMMIT_EFLAGS(EFlags);
7662 IEM_MC_ADVANCE_RIP_AND_FINISH();
7663 IEM_MC_END();
7664 break;
7665
7666 case IEMMODE_64BIT:
7667 IEM_MC_BEGIN(3, 2);
7668 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7669 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
7670 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7671 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7672
7673 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7675 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7676 IEM_MC_FETCH_EFLAGS(EFlags);
7677 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7678
7679 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7680 IEM_MC_COMMIT_EFLAGS(EFlags);
7681 IEM_MC_ADVANCE_RIP_AND_FINISH();
7682 IEM_MC_END();
7683 break;
7684
7685 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7686 }
7687 }
7688}
7689
7690
7691/**
7692 * @opcode 0xd2
7693 */
7694FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
7695{
7696 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7697 PCIEMOPSHIFTSIZES pImpl;
7698 switch (IEM_GET_MODRM_REG_8(bRm))
7699 {
7700 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
7701 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
7702 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
7703 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
7704 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
7705 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
7706 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
7707 case 6: return IEMOP_RAISE_INVALID_OPCODE();
7708 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
7709 }
7710 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7711
7712 if (IEM_IS_MODRM_REG_MODE(bRm))
7713 {
7714 /* register */
7715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7716 IEM_MC_BEGIN(3, 0);
7717 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7718 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7719 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7720 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7721 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7722 IEM_MC_REF_EFLAGS(pEFlags);
7723 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7724 IEM_MC_ADVANCE_RIP_AND_FINISH();
7725 IEM_MC_END();
7726 }
7727 else
7728 {
7729 /* memory */
7730 IEM_MC_BEGIN(3, 2);
7731 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7732 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7733 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7734 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7735
7736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7738 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7739 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7740 IEM_MC_FETCH_EFLAGS(EFlags);
7741 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7742
7743 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7744 IEM_MC_COMMIT_EFLAGS(EFlags);
7745 IEM_MC_ADVANCE_RIP_AND_FINISH();
7746 IEM_MC_END();
7747 }
7748}
7749
7750
7751/**
7752 * @opcode 0xd3
7753 */
7754FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
7755{
7756 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7757 PCIEMOPSHIFTSIZES pImpl;
7758 switch (IEM_GET_MODRM_REG_8(bRm))
7759 {
7760 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
7761 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
7762 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
7763 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
7764 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
7765 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
7766 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
7767 case 6: return IEMOP_RAISE_INVALID_OPCODE();
7768 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
7769 }
7770 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7771
7772 if (IEM_IS_MODRM_REG_MODE(bRm))
7773 {
7774 /* register */
7775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7776 switch (pVCpu->iem.s.enmEffOpSize)
7777 {
7778 case IEMMODE_16BIT:
7779 IEM_MC_BEGIN(3, 0);
7780 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7781 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7782 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7783 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7784 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7785 IEM_MC_REF_EFLAGS(pEFlags);
7786 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7787 IEM_MC_ADVANCE_RIP_AND_FINISH();
7788 IEM_MC_END();
7789 break;
7790
7791 case IEMMODE_32BIT:
7792 IEM_MC_BEGIN(3, 0);
7793 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7794 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7795 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7796 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7797 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7798 IEM_MC_REF_EFLAGS(pEFlags);
7799 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7800 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7801 IEM_MC_ADVANCE_RIP_AND_FINISH();
7802 IEM_MC_END();
7803 break;
7804
7805 case IEMMODE_64BIT:
7806 IEM_MC_BEGIN(3, 0);
7807 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7808 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7809 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7810 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7811 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7812 IEM_MC_REF_EFLAGS(pEFlags);
7813 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7814 IEM_MC_ADVANCE_RIP_AND_FINISH();
7815 IEM_MC_END();
7816 break;
7817
7818 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7819 }
7820 }
7821 else
7822 {
7823 /* memory */
7824 switch (pVCpu->iem.s.enmEffOpSize)
7825 {
7826 case IEMMODE_16BIT:
7827 IEM_MC_BEGIN(3, 2);
7828 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7829 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7830 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7831 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7832
7833 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7835 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7836 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7837 IEM_MC_FETCH_EFLAGS(EFlags);
7838 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7839
7840 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7841 IEM_MC_COMMIT_EFLAGS(EFlags);
7842 IEM_MC_ADVANCE_RIP_AND_FINISH();
7843 IEM_MC_END();
7844 break;
7845
7846 case IEMMODE_32BIT:
7847 IEM_MC_BEGIN(3, 2);
7848 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7849 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7850 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7851 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7852
7853 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7855 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7856 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7857 IEM_MC_FETCH_EFLAGS(EFlags);
7858 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7859
7860 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7861 IEM_MC_COMMIT_EFLAGS(EFlags);
7862 IEM_MC_ADVANCE_RIP_AND_FINISH();
7863 IEM_MC_END();
7864 break;
7865
7866 case IEMMODE_64BIT:
7867 IEM_MC_BEGIN(3, 2);
7868 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7869 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7870 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7871 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7872
7873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7875 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7876 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7877 IEM_MC_FETCH_EFLAGS(EFlags);
7878 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7879
7880 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7881 IEM_MC_COMMIT_EFLAGS(EFlags);
7882 IEM_MC_ADVANCE_RIP_AND_FINISH();
7883 IEM_MC_END();
7884 break;
7885
7886 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7887 }
7888 }
7889}
7890
7891/**
7892 * @opcode 0xd4
7893 */
7894FNIEMOP_DEF(iemOp_aam_Ib)
7895{
7896 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
7897 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
7898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7899 IEMOP_HLP_NO_64BIT();
7900 if (!bImm)
7901 return IEMOP_RAISE_DIVIDE_ERROR();
7902 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
7903}
7904
7905
7906/**
7907 * @opcode 0xd5
7908 */
7909FNIEMOP_DEF(iemOp_aad_Ib)
7910{
7911 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
7912 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
7913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7914 IEMOP_HLP_NO_64BIT();
7915 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
7916}
7917
7918
7919/**
7920 * @opcode 0xd6
7921 */
7922FNIEMOP_DEF(iemOp_salc)
7923{
7924 IEMOP_MNEMONIC(salc, "salc");
7925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7926 IEMOP_HLP_NO_64BIT();
7927
7928 IEM_MC_BEGIN(0, 0);
7929 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7930 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
7931 } IEM_MC_ELSE() {
7932 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
7933 } IEM_MC_ENDIF();
7934 IEM_MC_ADVANCE_RIP_AND_FINISH();
7935 IEM_MC_END();
7936}
7937
7938
7939/**
7940 * @opcode 0xd7
7941 */
7942FNIEMOP_DEF(iemOp_xlat)
7943{
7944 IEMOP_MNEMONIC(xlat, "xlat");
7945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7946 switch (pVCpu->iem.s.enmEffAddrMode)
7947 {
7948 case IEMMODE_16BIT:
7949 IEM_MC_BEGIN(2, 0);
7950 IEM_MC_LOCAL(uint8_t, u8Tmp);
7951 IEM_MC_LOCAL(uint16_t, u16Addr);
7952 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
7953 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
7954 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
7955 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
7956 IEM_MC_ADVANCE_RIP_AND_FINISH();
7957 IEM_MC_END();
7958 break;
7959
7960 case IEMMODE_32BIT:
7961 IEM_MC_BEGIN(2, 0);
7962 IEM_MC_LOCAL(uint8_t, u8Tmp);
7963 IEM_MC_LOCAL(uint32_t, u32Addr);
7964 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
7965 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
7966 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
7967 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
7968 IEM_MC_ADVANCE_RIP_AND_FINISH();
7969 IEM_MC_END();
7970 break;
7971
7972 case IEMMODE_64BIT:
7973 IEM_MC_BEGIN(2, 0);
7974 IEM_MC_LOCAL(uint8_t, u8Tmp);
7975 IEM_MC_LOCAL(uint64_t, u64Addr);
7976 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
7977 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
7978 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
7979 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
7980 IEM_MC_ADVANCE_RIP_AND_FINISH();
7981 IEM_MC_END();
7982 break;
7983
7984 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7985 }
7986}
7987
7988
7989/**
7990 * Common worker for FPU instructions working on ST0 and STn, and storing the
7991 * result in ST0.
7992 *
7993 * @param bRm Mod R/M byte.
7994 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7995 */
7996FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
7997{
7998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7999
8000 IEM_MC_BEGIN(3, 1);
8001 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8002 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8003 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8004 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8005
8006 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8007 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8008 IEM_MC_PREPARE_FPU_USAGE();
8009 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8010 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
8011 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
8012 } IEM_MC_ELSE() {
8013 IEM_MC_FPU_STACK_UNDERFLOW(0);
8014 } IEM_MC_ENDIF();
8015 IEM_MC_ADVANCE_RIP_AND_FINISH();
8016
8017 IEM_MC_END();
8018}
8019
8020
8021/**
8022 * Common worker for FPU instructions working on ST0 and STn, and only affecting
8023 * flags.
8024 *
8025 * @param bRm Mod R/M byte.
8026 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8027 */
8028FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8029{
8030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8031
8032 IEM_MC_BEGIN(3, 1);
8033 IEM_MC_LOCAL(uint16_t, u16Fsw);
8034 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8035 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8036 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8037
8038 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8039 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8040 IEM_MC_PREPARE_FPU_USAGE();
8041 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8042 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8043 IEM_MC_UPDATE_FSW(u16Fsw);
8044 } IEM_MC_ELSE() {
8045 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
8046 } IEM_MC_ENDIF();
8047 IEM_MC_ADVANCE_RIP_AND_FINISH();
8048
8049 IEM_MC_END();
8050}
8051
8052
8053/**
8054 * Common worker for FPU instructions working on ST0 and STn, only affecting
8055 * flags, and popping when done.
8056 *
8057 * @param bRm Mod R/M byte.
8058 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8059 */
8060FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8061{
8062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8063
8064 IEM_MC_BEGIN(3, 1);
8065 IEM_MC_LOCAL(uint16_t, u16Fsw);
8066 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8067 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8068 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8069
8070 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8071 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8072 IEM_MC_PREPARE_FPU_USAGE();
8073 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8074 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8075 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
8076 } IEM_MC_ELSE() {
8077 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
8078 } IEM_MC_ENDIF();
8079 IEM_MC_ADVANCE_RIP_AND_FINISH();
8080
8081 IEM_MC_END();
8082}
8083
8084
/** Opcode 0xd8 11/0.  FADD ST(0),ST(i): adds ST(i) to ST(0), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
8091
8092
/** Opcode 0xd8 11/1.  FMUL ST(0),ST(i): multiplies ST(0) by ST(i), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
8099
8100
/** Opcode 0xd8 11/2.  FCOM ST(0),ST(i): compares ST(0) with ST(i); only FSW is updated. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
8107
8108
/** Opcode 0xd8 11/3.  FCOMP ST(0),ST(i): like FCOM but pops the stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
8115
8116
/** Opcode 0xd8 11/4.  FSUB ST(0),ST(i): ST(0) = ST(0) - ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
8123
8124
/** Opcode 0xd8 11/5.  FSUBR ST(0),ST(i): reversed subtract, ST(0) = ST(i) - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
8131
8132
/** Opcode 0xd8 11/6.  FDIV ST(0),ST(i): ST(0) = ST(0) / ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
8139
8140
/** Opcode 0xd8 11/7.  FDIVR ST(0),ST(i): reversed divide, ST(0) = ST(i) / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
8147
8148
8149/**
8150 * Common worker for FPU instructions working on ST0 and an m32r, and storing
8151 * the result in ST0.
8152 *
8153 * @param bRm Mod R/M byte.
8154 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8155 */
8156FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
8157{
8158 IEM_MC_BEGIN(3, 3);
8159 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8160 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8161 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
8162 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8163 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8164 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
8165
8166 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8168
8169 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8170 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8171 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8172
8173 IEM_MC_PREPARE_FPU_USAGE();
8174 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
8175 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
8176 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
8177 } IEM_MC_ELSE() {
8178 IEM_MC_FPU_STACK_UNDERFLOW(0);
8179 } IEM_MC_ENDIF();
8180 IEM_MC_ADVANCE_RIP_AND_FINISH();
8181
8182 IEM_MC_END();
8183}
8184
8185
/** Opcode 0xd8 !11/0.  FADD ST(0),m32real: adds a 32-bit real from memory to ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
8192
8193
/** Opcode 0xd8 !11/1.  FMUL ST(0),m32real: multiplies ST(0) by a 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
8200
8201
/** Opcode 0xd8 !11/2.  FCOM ST(0),m32real: compares ST(0) with a 32-bit real from
 *  memory; only FSW is updated.  Open-coded (not via a worker) because it
 *  records the memory operand in the FPU DS/DP state on both paths. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8233
8234
/** Opcode 0xd8 !11/3.  FCOMP ST(0),m32real: like FCOM m32real, but pops the
 *  stack afterwards (on both the compare and the underflow path). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8266
8267
/** Opcode 0xd8 !11/4.  FSUB ST(0),m32real: ST(0) = ST(0) - m32real. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
8274
8275
/** Opcode 0xd8 !11/5.  FSUBR ST(0),m32real: reversed subtract, ST(0) = m32real - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
8282
8283
/** Opcode 0xd8 !11/6.  FDIV ST(0),m32real: ST(0) = ST(0) / m32real. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
8290
8291
/** Opcode 0xd8 !11/7.  FDIVR ST(0),m32real: reversed divide, ST(0) = m32real / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
8298
8299
8300/**
8301 * @opcode 0xd8
8302 */
8303FNIEMOP_DEF(iemOp_EscF0)
8304{
8305 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8306 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
8307
8308 if (IEM_IS_MODRM_REG_MODE(bRm))
8309 {
8310 switch (IEM_GET_MODRM_REG_8(bRm))
8311 {
8312 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
8313 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
8314 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
8315 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
8316 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
8317 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
8318 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
8319 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
8320 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8321 }
8322 }
8323 else
8324 {
8325 switch (IEM_GET_MODRM_REG_8(bRm))
8326 {
8327 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
8328 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
8329 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
8330 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
8331 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
8332 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
8333 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
8334 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
8335 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8336 }
8337 }
8338}
8339
8340
/** Opcode 0xd9 /0 mem32real
 *
 * FLD m32real: converts a 32-bit real from memory to 80-bit and pushes it
 * onto the FPU stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val,    r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) is the slot the push will land in; it must
       be free or we have a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8372
8373
/** Opcode 0xd9 !11/2 mem32real
 *
 * FST m32real: stores ST(0) to memory as a 32-bit real (no pop).  If ST(0) is
 * empty and IM is masked, a negative QNaN is written instead. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* The destination is mapped up-front so memory faults are raised before
       the FPU state is modified. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Stack underflow: only store the QNaN when the invalid-op exception is masked. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8407
8408
/** Opcode 0xd9 !11/3
 *
 * FSTP m32real: stores ST(0) to memory as a 32-bit real and pops the stack. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before touching FPU state so faults come first. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Stack underflow: write a negative QNaN only if #IA is masked, then pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8442
8443
/** Opcode 0xd9 !11/4
 *
 * FLDENV m14/28byte: loads the FPU environment (CW, SW, TW, pointers) from
 * memory; the image size depends on the effective operand size.  Defers to
 * the C implementation iemCImpl_fldenv. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
8460
8461
8462/** Opcode 0xd9 !11/5 */
8463FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
8464{
8465 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
8466 IEM_MC_BEGIN(1, 1);
8467 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8468 IEM_MC_ARG(uint16_t, u16Fsw, 0);
8469 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8471 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8472 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8473 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8474 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
8475 IEM_MC_END();
8476}
8477
8478
/** Opcode 0xd9 !11/6
 *
 * FNSTENV m14/28byte: stores the FPU environment to memory without checking
 * for pending exceptions.  Defers to iemCImpl_fnstenv.
 * NOTE(review): the mnemonic-stat name says "fstenv"; presumably intentional
 * (FSTENV is FNSTENV preceded by FWAIT) — confirm before renaming. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
8495
8496
/** Opcode 0xd9 !11/7
 *
 * FNSTCW m2byte: stores the FPU control word to a 16-bit memory operand
 * without checking for pending exceptions. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
8513
8514
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 *
 * FNOP: FPU no-operation; still raises device-not-available / pending FPU
 * exceptions and updates the FPU opcode/IP state. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
8531
8532
/** Opcode 0xd9 11/0 stN
 *
 * FLD ST(i): pushes a copy of ST(i) onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* The source register is read before the push adjusts TOP. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8558
8559
/** Opcode 0xd9 11/3 stN
 *
 * FXCH ST(i): exchanges ST(0) with ST(i).  The underflow case (either
 * register empty) is handled by the C implementation. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_CONST(uint8_t,           iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        /* ST(i)'s old value goes to ST0 (with C1 set), ST0's old value to ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8588
8589
/** Opcode 0xd9 11/4, 0xdd 11/2.
 *
 * FSTP ST(i): copies ST(0) to ST(i) and pops the stack.  The iDstReg == 0
 * case ('fstp st0,st0') is special-cased as it degenerates to a pop. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t,        u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        /* No copy needed; just pop (or report underflow on an empty ST0). */
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8635
8636
8637/**
8638 * Common worker for FPU instructions working on ST0 and replaces it with the
8639 * result, i.e. unary operators.
8640 *
8641 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8642 */
8643FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
8644{
8645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8646
8647 IEM_MC_BEGIN(2, 1);
8648 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8649 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8650 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
8651
8652 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8653 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8654 IEM_MC_PREPARE_FPU_USAGE();
8655 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
8656 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
8657 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
8658 } IEM_MC_ELSE() {
8659 IEM_MC_FPU_STACK_UNDERFLOW(0);
8660 } IEM_MC_ENDIF();
8661 IEM_MC_ADVANCE_RIP_AND_FINISH();
8662
8663 IEM_MC_END();
8664}
8665
8666
/** Opcode 0xd9 0xe0.  FCHS: negates the sign of ST(0). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
8673
8674
/** Opcode 0xd9 0xe1.  FABS: clears the sign of ST(0), yielding its absolute value. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
8681
8682
/** Opcode 0xd9 0xe4.
 *
 * FTST: compares ST(0) against +0.0, setting C0/C2/C3 in FSW. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8707
8708
/** Opcode 0xd9 0xe5.
 *
 * FXAM: classifies the value in ST(0) into C0/C2/C3 (and C1 for the sign).
 * Unlike most ST0 operations this does NOT check for an empty register --
 * FXAM classifies "empty" itself, so the register is referenced directly. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8730
8731
8732/**
8733 * Common worker for FPU instructions pushing a constant onto the FPU stack.
8734 *
8735 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8736 */
8737FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
8738{
8739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8740
8741 IEM_MC_BEGIN(1, 1);
8742 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8743 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8744
8745 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8746 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8747 IEM_MC_PREPARE_FPU_USAGE();
8748 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
8749 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
8750 IEM_MC_PUSH_FPU_RESULT(FpuRes);
8751 } IEM_MC_ELSE() {
8752 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
8753 } IEM_MC_ENDIF();
8754 IEM_MC_ADVANCE_RIP_AND_FINISH();
8755
8756 IEM_MC_END();
8757}
8758
8759
/** Opcode 0xd9 0xe8.  FLD1: pushes +1.0 onto the FPU stack. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
8766
8767
/** Opcode 0xd9 0xe9.  FLDL2T: pushes log2(10) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
8774
8775
/** Opcode 0xd9 0xea.  FLDL2E: pushes log2(e) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
8782
/** Opcode 0xd9 0xeb.  FLDPI: pushes pi onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
8789
8790
/** Opcode 0xd9 0xec.  FLDLG2: pushes log10(2) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
8797
/** Opcode 0xd9 0xed.  FLDLN2: pushes ln(2) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
8804
8805
/** Opcode 0xd9 0xee.  FLDZ: pushes +0.0 onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
8812
8813
/** Opcode 0xd9 0xf0.
 *
 * F2XM1: computes 2^ST(0) - 1, replacing ST(0).
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
8827
8828
8829/**
8830 * Common worker for FPU instructions working on STn and ST0, storing the result
8831 * in STn, and popping the stack unless IE, DE or ZE was raised.
8832 *
8833 * @param bRm Mod R/M byte.
8834 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8835 */
8836FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
8837{
8838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8839
8840 IEM_MC_BEGIN(3, 1);
8841 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8842 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8843 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8844 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8845
8846 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8847 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8848
8849 IEM_MC_PREPARE_FPU_USAGE();
8850 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
8851 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
8852 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm));
8853 } IEM_MC_ELSE() {
8854 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm));
8855 } IEM_MC_ENDIF();
8856 IEM_MC_ADVANCE_RIP_AND_FINISH();
8857
8858 IEM_MC_END();
8859}
8860
8861
/** Opcode 0xd9 0xf1.  FYL2X: ST(1) = ST(1) * log2(ST(0)), then pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
8868
8869
8870/**
8871 * Common worker for FPU instructions working on ST0 and having two outputs, one
8872 * replacing ST0 and one pushed onto the stack.
8873 *
8874 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8875 */
8876FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
8877{
8878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8879
8880 IEM_MC_BEGIN(2, 1);
8881 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
8882 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
8883 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
8884
8885 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8886 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8887 IEM_MC_PREPARE_FPU_USAGE();
8888 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
8889 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
8890 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
8891 } IEM_MC_ELSE() {
8892 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
8893 } IEM_MC_ENDIF();
8894 IEM_MC_ADVANCE_RIP_AND_FINISH();
8895
8896 IEM_MC_END();
8897}
8898
8899
/** Opcode 0xd9 0xf2.  FPTAN: replaces ST(0) with tan(ST(0)) and pushes 1.0. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
8906
8907
/** Opcode 0xd9 0xf3.  FPATAN: ST(1) = arctan(ST(1)/ST(0)), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
8914
8915
/** Opcode 0xd9 0xf4.  FXTRACT: splits ST(0) into exponent (replaces ST(0)) and pushed significand. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
8922
8923
/** Opcode 0xd9 0xf5.  FPREM1: IEEE partial remainder of ST(0)/ST(1), result in ST(0). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
8930
8931
/** Opcode 0xd9 0xf6.
 *
 * FDECSTP: decrements the FPU stack TOP pointer (no data is moved). */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8953
8954
/** Opcode 0xd9 0xf7.
 *
 * FINCSTP: increments the FPU stack TOP pointer (no data is moved). */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8976
8977
/** Opcode 0xd9 0xf8.  FPREM: partial remainder (truncating) of ST(0)/ST(1), result in ST(0). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
8984
8985
/** Opcode 0xd9 0xf9.  FYL2XP1: ST(1) = ST(1) * log2(ST(0) + 1.0), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
8992
8993
/** Opcode 0xd9 0xfa.  FSQRT: replaces ST(0) with its square root. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
9000
9001
/** Opcode 0xd9 0xfb.  FSINCOS: replaces ST(0) with sin(ST(0)) and pushes cos(ST(0)). */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
9008
9009
/** Opcode 0xd9 0xfc.  FRNDINT: rounds ST(0) to an integer per the FCW rounding mode. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
9016
9017
/** Opcode 0xd9 0xfd.  FSCALE: scales ST(0) by 2^trunc(ST(1)). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
9024
9025
/** Opcode 0xd9 0xfe.  FSIN: replaces ST(0) with sin(ST(0)). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
9032
9033
/** Opcode 0xd9 0xff.  FCOS: replaces ST(0) with cos(ST(0)). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
9040
9041
/** Used by iemOp_EscF1.
 * Dispatch table for 0xd9 opcode bytes 0xe0..0xff; indexed by (opcode byte - 0xe0). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
9078
9079
/**
 * @opcode 0xd9
 *
 * Escape opcode 0xd9 decoder: fetches the ModR/M byte, records the FPU
 * opcode word (low 3 bits of the escape byte + ModR/M, presumably consumed
 * by FNSTENV/FNSAVE style state saving - confirm against those handlers),
 * and dispatches on register vs. memory form and the reg field.
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register form: reg field selects the instruction group. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* 0xe0..0xff are dispatched through the lookup table. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: reg field selects the memory operation. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9124
9125
/** Opcode 0xda 11/0 - FCMOVB: copy st(i) to st0 if CF is set (below). */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both st(i) and st0 must be non-empty; otherwise signal stack underflow on st0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9151
9152
/** Opcode 0xda 11/1 - FCMOVE: copy st(i) to st0 if ZF is set (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both st(i) and st0 must be non-empty; otherwise signal stack underflow on st0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9178
9179
/** Opcode 0xda 11/2 - FCMOVBE: copy st(i) to st0 if CF or ZF is set (below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both st(i) and st0 must be non-empty; otherwise signal stack underflow on st0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9205
9206
/** Opcode 0xda 11/3 - FCMOVU: copy st(i) to st0 if PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both st(i) and st0 must be non-empty; otherwise signal stack underflow on st0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9232
9233
/**
 * Common worker for FPU instructions working on ST0 and ST1, only affecting
 * flags, and popping twice when done.
 *
 * The assembly worker only writes FSW (no value result); the stack is popped
 * twice both on the normal path and on the underflow path.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both st0 and st1 must be non-empty, else it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9264
9265
/** Opcode 0xda 0xe9 - FUCOMPP: unordered compare st0 with st1, pop both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
9272
9273
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * Calculates the effective address first, fetches the 32-bit integer operand,
 * then (if st0 is non-empty) runs the assembly worker and stores the result
 * back into st0; an empty st0 signals stack underflow instead.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9309
9310
/** Opcode 0xda !11/0 - FIADD m32i: st0 := st0 + (int32 memory operand). */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
9317
9318
/** Opcode 0xda !11/1 - FIMUL m32i: st0 := st0 * (int32 memory operand). */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
9325
9326
/** Opcode 0xda !11/2 - FICOM m32i: compare st0 with an int32 memory operand,
 *  setting only the FSW condition codes (no value stored, no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no stack register is involved in the underflow reporting. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9358
9359
/** Opcode 0xda !11/3 - FICOMP m32i: like FICOM m32i but pops st0 afterwards
 *  (same assembly worker; only the FSW-update/underflow variants differ). */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no stack register is involved in the underflow reporting. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9391
9392
/** Opcode 0xda !11/4 - FISUB m32i: st0 := st0 - (int32 memory operand). */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
9399
9400
/** Opcode 0xda !11/5 - FISUBR m32i: st0 := (int32 memory operand) - st0. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
9407
9408
/** Opcode 0xda !11/6 - FIDIV m32i: st0 := st0 / (int32 memory operand). */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
9415
9416
/** Opcode 0xda !11/7 - FIDIVR m32i: st0 := (int32 memory operand) / st0. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
9423
9424
/**
 * @opcode 0xda
 *
 * Escape opcode 0xda decoder: register form is FCMOVcc (reg 0..3) plus the
 * single FUCOMPP encoding (0xe9); memory form is the int32 arithmetic group.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9466
9467
/** Opcode 0xdb !11/0 - FILD m32i: convert an int32 memory operand to r80 and
 *  push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* st7 is the register that becomes the new st0 on push; it must be empty
       or we have a stack (push) overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9498
9499
/** Opcode 0xdb !11/1 - FISTTP m32i (SSE3): store st0 to an int32 memory
 *  operand with truncation (chop) regardless of FCW.RC, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so memory faults are raised
       before any FPU state is touched. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Empty st0: if the invalid-op exception is masked, store the integer
           indefinite value; either way report underflow and pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9533
9534
/** Opcode 0xdb !11/2 - FIST m32i: store st0 to an int32 memory operand using
 *  the current rounding mode; st0 is not popped. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so memory faults are raised
       before any FPU state is touched. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Empty st0: if the invalid-op exception is masked, store the integer
           indefinite value; either way report underflow (no pop). */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9568
9569
/** Opcode 0xdb !11/3 - FISTP m32i: like FIST m32i (current rounding mode) but
 *  pops st0 afterwards. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so memory faults are raised
       before any FPU state is touched. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Empty st0: if the invalid-op exception is masked, store the integer
           indefinite value; either way report underflow and pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9603
9604
/** Opcode 0xdb !11/5 - FLD m80r: load an 80-bit real from memory and push it
 *  onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* st7 is the register that becomes the new st0 on push; it must be empty
       or we have a stack (push) overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9635
9636
/** Opcode 0xdb !11/7 - FSTP m80r: store st0 to an 80-bit real in memory and
 *  pop the FPU stack. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the 10-byte destination up front; explicit variant needed here to
       pass the size and alignment spec (7 - NOTE(review): presumably an
       alignment mask/value for the 80-bit store; confirm against
       IEM_MC_MEM_MAP_EX's definition). */
    IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Empty st0: with masked invalid-op exception, store negative QNaN
           (real indefinite); either way report underflow and pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9670
9671
/** Opcode 0xdb 11/0 - FCMOVNB: copy st(i) to st0 if CF is clear (not below). */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both st(i) and st0 must be non-empty; otherwise signal stack underflow on st0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9697
9698
/** Opcode 0xdb 11/1 - FCMOVNE: copy st(i) to st0 if ZF is clear (not equal). */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both st(i) and st0 must be non-empty; otherwise signal stack underflow on st0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9724
9725
/** Opcode 0xdb 11/2 - FCMOVNBE: copy st(i) to st0 if both CF and ZF are clear
 *  (not below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both st(i) and st0 must be non-empty; otherwise signal stack underflow on st0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9751
9752
/** Opcode 0xdb 11/3 - FCMOVNU: copy st(i) to st0 if PF is clear (not
 *  unordered).  Note: the identifier uses a doubled 'n' ("fcmovnnu"); kept
 *  as-is since the dispatcher references it by this name. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both st(i) and st0 must be non-empty; otherwise signal stack underflow on st0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9778
9779
/** Opcode 0xdb 0xe0 - FNENI: 8087-only interrupt enable; treated as a NOP
 *  here (only the device-not-available check is performed). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9790
9791
/** Opcode 0xdb 0xe1 - FNDISI: 8087-only interrupt disable; treated as a NOP
 *  here (only the device-not-available check is performed). */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9802
9803
/** Opcode 0xdb 0xe2 - FNCLEX: clear the FPU exception bits in FSW without
 *  checking for pending exceptions first (the no-wait form). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9817
9818
/** Opcode 0xdb 0xe3 - FNINIT: reinitialize the FPU; deferred to the C
 *  implementation with fCheckXcpts=false (no-wait form, no pending-exception
 *  check). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
9826
9827
/** Opcode 0xdb 0xe4 - FNSETPM: 80287-only "set protected mode"; treated as a
 *  NOP here (only the device-not-available check is performed). */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9838
9839
/** Opcode 0xdb 0xe5 - FRSTPM: 80287XL-only "reset protected mode"; raises
 *  \#UD here to match newer CPUs (the ignore-as-NOP variant is #if 0'ed). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
9855
9856
/** Opcode 0xdb 11/5 - FUCOMI: unordered compare st0 with st(i), setting
 *  EFLAGS; no pop (fPop=false), deferred to the common C implementation. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
9863
9864
/** Opcode 0xdb 11/6 - FCOMI: ordered compare st0 with st(i), setting EFLAGS;
 *  no pop (fPop=false), deferred to the common C implementation. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
9871
9872
/**
 * @opcode 0xdb
 *
 * Escape opcode 0xdb decoder: register form covers FCMOVNcc, the 0xe0..0xe7
 * control group (FNENI/FNDISI/FNCLEX/FNINIT/FNSETPM/FRSTPM) and
 * FUCOMI/FCOMI; memory form covers int32 load/store and m80r load/store.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* The control instruction group; each encoding is distinct. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9924
9925
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * The ModR/M rm field selects the destination register st(i); both st(i) and
 * st0 must be non-empty or a stack underflow on st(i) is signalled.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm));
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm));
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9957
9958
/** Opcode 0xdc 11/0 - FADD st(i),st0: st(i) := st(i) + st0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
9965
9966
/** Opcode 0xdc 11/1 - FMUL st(i),st0: st(i) := st(i) * st0. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
9973
9974
/** Opcode 0xdc 11/4 - FSUBR st(i),st0 (reversed-operand subtraction). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
9981
9982
/** Opcode 0xdc 11/5 - FSUB st(i),st0: st(i) := st(i) - st0. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
9989
9990
/** Opcode 0xdc 11/6 - FDIVR st(i),st0 (reversed-operand division). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
9997
9998
/** Opcode 0xdc 11/7 - FDIV st(i),st0: st(i) := st(i) / st0. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
10005
10006
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * Calculates the effective address, fetches the r64 operand, then (if st0 is
 * non-empty) runs the assembly worker and stores the result into st0; an
 * empty st0 signals stack underflow, with the memory operand recorded for
 * FDP/FDS reporting.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnImpl     Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10041
10042
/** Opcode 0xdc !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    /* ST0 = ST0 + [mem64], via the common ST0/m64r worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}


/** Opcode 0xdc !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    /* ST0 = ST0 * [mem64], via the common ST0/m64r worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
10057
10058
/** Opcode 0xdc !11/2.
 * FCOM ST0,m64r - compare ST0 with a 64-bit memory operand, updating only FSW. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        /* Empty ST0: no register to store to, hence UINT8_MAX for the store-register. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10090
10091
/** Opcode 0xdc !11/3.
 * FCOMP ST0,m64r - like FCOM (same worker) but pops ST0 afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        /* _THEN_POP variant: the stack is popped after the FSW update. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10123
10124
/** Opcode 0xdc !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    /* ST0 = ST0 - [mem64]. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}


/** Opcode 0xdc !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    /* ST0 = [mem64] - ST0 (reversed operands). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}


/** Opcode 0xdc !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    /* ST0 = ST0 / [mem64]. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}


/** Opcode 0xdc !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    /* ST0 = [mem64] / ST0 (reversed operands). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
10155
10156
10157/**
10158 * @opcode 0xdc
10159 */
10160FNIEMOP_DEF(iemOp_EscF4)
10161{
10162 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10163 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
10164 if (IEM_IS_MODRM_REG_MODE(bRm))
10165 {
10166 switch (IEM_GET_MODRM_REG_8(bRm))
10167 {
10168 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
10169 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
10170 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
10171 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
10172 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
10173 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
10174 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
10175 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
10176 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10177 }
10178 }
10179 else
10180 {
10181 switch (IEM_GET_MODRM_REG_8(bRm))
10182 {
10183 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
10184 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
10185 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
10186 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
10187 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
10188 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
10189 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
10190 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
10191 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10192 }
10193 }
10194}
10195
10196
/** Opcode 0xdd !11/0.
 * FLD m64r - convert a 64-bit real from memory to r80 and push it.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target is ST(-1), i.e. current TOP-1 = register 7 relative to TOP. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        /* Target register in use: stack overflow on push. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10227
10228
/** Opcode 0xdd !11/1.
 * FISTTP m64i - store ST0 as a truncated 64-bit integer and pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Empty ST0: with #IA masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10262
10263
/** Opcode 0xdd !11/2.
 * FST m64r - store ST0 as a 64-bit real (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Empty ST0: with #IA masked, store the negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10297
10298
10299
10300
/** Opcode 0xdd !11/3.
 * FSTP m64r - store ST0 as a 64-bit real and pop. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Empty ST0: with #IA masked, store the negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10334
10335
/** Opcode 0xdd !11/4.
 * FRSTOR - restore the full FPU state from memory; deferred to C code. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0xdd !11/6.
 * FNSAVE - save the full FPU state to memory (no exception check first);
 * deferred to C code. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
10370
/** Opcode 0xdd !11/7.
 * FNSTSW m16 - store the FPU status word to memory (no exception check). */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 *        from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 *        NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
10394
10395
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - mark the register's FTW tag as empty without touching TOP. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10416
10417
/** Opcode 0xdd 11/2.
 * FST ST(i) - copy ST0 into ST(i) without popping. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Wrap the ST0 value in a result with a zero FSW delta and store it in ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm));
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm));
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10441
10442
/** Opcode 0xdd 11/4. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    /* Unordered compare of ST0 against ST(i); no result is stored. */
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}


/** Opcode 0xdd 11/5. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    /* Same as FUCOM, but the worker pops ST0 afterwards. */
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
10457
10458
10459/**
10460 * @opcode 0xdd
10461 */
10462FNIEMOP_DEF(iemOp_EscF5)
10463{
10464 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10465 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
10466 if (IEM_IS_MODRM_REG_MODE(bRm))
10467 {
10468 switch (IEM_GET_MODRM_REG_8(bRm))
10469 {
10470 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
10471 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
10472 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
10473 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
10474 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
10475 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
10476 case 6: return IEMOP_RAISE_INVALID_OPCODE();
10477 case 7: return IEMOP_RAISE_INVALID_OPCODE();
10478 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10479 }
10480 }
10481 else
10482 {
10483 switch (IEM_GET_MODRM_REG_8(bRm))
10484 {
10485 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
10486 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
10487 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
10488 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
10489 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
10490 case 5: return IEMOP_RAISE_INVALID_OPCODE();
10491 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
10492 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
10493 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10494 }
10495 }
10496}
10497
10498
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST0 - add and pop, via the common ST(i),ST(0)-then-pop worker. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xde 11/1. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xde 0xd9.
 * FCOMPP - compare ST0 with ST1, then pop both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
10521
10522
/** Opcode 0xde 11/4.
 * Note! Like 0xdc, /4 is the reversed subtract (FSUBRP) and /5 plain FSUBP. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xde 11/5. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xde 11/6. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xde 11/7. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
10553
10554
10555/**
10556 * Common worker for FPU instructions working on ST0 and an m16i, and storing
10557 * the result in ST0.
10558 *
10559 * @param bRm Mod R/M byte.
10560 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10561 */
10562FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
10563{
10564 IEM_MC_BEGIN(3, 3);
10565 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10566 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10567 IEM_MC_LOCAL(int16_t, i16Val2);
10568 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10569 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10570 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
10571
10572 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10574
10575 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10576 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10577 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10578
10579 IEM_MC_PREPARE_FPU_USAGE();
10580 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10581 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
10582 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
10583 } IEM_MC_ELSE() {
10584 IEM_MC_FPU_STACK_UNDERFLOW(0);
10585 } IEM_MC_ENDIF();
10586 IEM_MC_ADVANCE_RIP_AND_FINISH();
10587
10588 IEM_MC_END();
10589}
10590
10591
/** Opcode 0xde !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    /* ST0 = ST0 + (int16)[mem], via the common ST0/m16i worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}


/** Opcode 0xde !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    /* ST0 = ST0 * (int16)[mem], via the common ST0/m16i worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
10606
10607
/** Opcode 0xde !11/2.
 * FICOM ST0,m16i - compare ST0 with a 16-bit integer, updating only FSW. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10639
10640
/** Opcode 0xde !11/3.
 * FICOMP ST0,m16i - like FICOM (same worker) but pops ST0 afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10672
10673
/** Opcode 0xde !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    /* ST0 = ST0 - (int16)[mem]. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}


/** Opcode 0xde !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    /* ST0 = (int16)[mem] - ST0 (reversed operands). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}


/** Opcode 0xde !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    /* ST0 = ST0 / (int16)[mem]. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}


/** Opcode 0xde !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    /* ST0 = (int16)[mem] / ST0 (reversed operands). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
10704
10705
10706/**
10707 * @opcode 0xde
10708 */
10709FNIEMOP_DEF(iemOp_EscF6)
10710{
10711 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10712 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
10713 if (IEM_IS_MODRM_REG_MODE(bRm))
10714 {
10715 switch (IEM_GET_MODRM_REG_8(bRm))
10716 {
10717 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
10718 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
10719 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
10720 case 3: if (bRm == 0xd9)
10721 return FNIEMOP_CALL(iemOp_fcompp);
10722 return IEMOP_RAISE_INVALID_OPCODE();
10723 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
10724 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
10725 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
10726 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
10727 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10728 }
10729 }
10730 else
10731 {
10732 switch (IEM_GET_MODRM_REG_8(bRm))
10733 {
10734 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
10735 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
10736 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
10737 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
10738 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
10739 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
10740 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
10741 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
10742 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10743 }
10744 }
10745}
10746
10747
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    /* The "pop": increment TOP after freeing the register. */
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10768
10769
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - store the FPU status word in AX (no pending-exception check). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10785
10786
10787/** Opcode 0xdf 11/5. */
10788FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
10789{
10790 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
10791 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
10792}
10793
10794
/** Opcode 0xdf 11/6.
 * FCOMIP ST0,ST(i) - ordered compare into EFLAGS, then pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
10801
10802
/** Opcode 0xdf !11/0.
 * FILD m16i - convert a 16-bit integer from memory to r80 and push it. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target is register 7 relative to TOP (i.e. TOP-1). */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10833
10834
/** Opcode 0xdf !11/1.
 * FISTTP m16i - store ST0 as a truncated 16-bit integer and pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Empty ST0: with #IA masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10868
10869
/** Opcode 0xdf !11/2.
 * FIST m16i - store ST0 as a 16-bit integer using the rounding mode in FCW
 * (no pop). */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Empty ST0: with #IA masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10903
10904
/** Opcode 0xdf !11/3.
 * FISTP m16i - like FIST (same worker) but pops ST0 afterwards. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Empty ST0: with #IA masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10938
10939
/** Opcode 0xdf !11/4.
 * FBLD m80bcd - load an 80-bit packed BCD value from memory and push it
 * onto the FPU register stack. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the packed BCD source before the FPU stack check. */
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Push requires the register that will become ST(0), i.e. ST(7), to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10970
10971
/** Opcode 0xdf !11/5.
 * FILD m64i - load a signed 64-bit integer from memory and push it onto
 * the FPU register stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the integer source before the FPU stack check. */
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Push requires the register that will become ST(0), i.e. ST(7), to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11002
11003
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd - store ST(0) as an 80-bit packed BCD value in memory and
 * pop the FPU register stack. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* 10-byte destination; mapped via the _EX variant with explicit size/alignment. */
    IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* ST(0) valid: convert to packed BCD, commit, then update FSW and pop. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Stack underflow: with FCW.IM masked, store the BCD indefinite value
           before signalling underflow + pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11037
11038
/** Opcode 0xdf !11/7.
 * FISTP m64i - store ST(0) as a signed 64-bit integer and pop the FPU stack. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination qword for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* ST(0) valid: convert, commit the store, then update FSW and pop. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Stack underflow: with FCW.IM masked, store the integer indefinite
           value before signalling underflow + pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11072
11073
/**
 * @opcode 0xdf
 *
 * FPU escape byte 0xdf: dispatches on the ModR/M byte.  Register forms
 * (mod == 3) and memory forms (mod != 3) use separate tables, selected by
 * the reg field of ModR/M.
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register operand forms. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* FNSTSW AX is only valid for ModR/M 0xe0 (/4, rm=0). */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory operand forms. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11113
11114
/**
 * @opcode 0xe0
 *
 * LOOPNE/LOOPNZ Jb: decrement rCX (width per effective address size) and
 * take the short relative jump when the counter is non-zero AND ZF is clear.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address size prefix selects which counter register width is used. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11163
11164
/**
 * @opcode 0xe1
 *
 * LOOPE/LOOPZ Jb: decrement rCX (width per effective address size) and
 * take the short relative jump when the counter is non-zero AND ZF is set.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address size prefix selects which counter register width is used. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11213
11214
/**
 * @opcode 0xe2
 *
 * LOOP Jb: decrement rCX (width per effective address size) and take the
 * short relative jump while the counter is non-zero.  Contains a special
 * logging-only shortcut for tight self-targeting stall loops (LOOP $-2).
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */

    /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
     * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs.  Shortcutting
     * the loop causes guest crashes, but when logging it's nice to skip a few million
     * lines of useless output. */
#if defined(LOG_ENABLED)
    /* Only when verbose logging is active and the jump targets the instruction
       itself: zero the counter in one go instead of iterating. */
    if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0,0);
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0,0);
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0,0);
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
#endif

    /* Regular path: decrement the counter and branch while it is non-zero. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);

            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11301
11302
/**
 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ Jb: take the short relative jump when the counter
 * register (width per effective address size) is zero.  Note the inverted
 * condition: the NZ branch falls through, the ELSE branch jumps.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11348
11349
/** Opcode 0xe4.
 * IN AL,Ib - read one byte from the immediate I/O port into AL.
 * Deferred to the C implementation (iemCImpl_in) with fImm=true. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_in, u8Imm, true /* fImm */, 1);
}
11358
11359
/** Opcode 0xe5.
 * IN eAX,Ib - read a word or dword (per effective operand size) from the
 * immediate I/O port into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_in, u8Imm, true /* fImm */, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
11368
11369
/** Opcode 0xe6.
 * OUT Ib,AL - write AL to the immediate I/O port.
 * Deferred to the C implementation (iemCImpl_out) with fImm=true. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_out, u8Imm, true /* fImm */, 1);
}
11378
11379
/** Opcode 0xe7.
 * OUT Ib,eAX - write AX/EAX (per effective operand size) to the immediate
 * I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_out, u8Imm, true /* fImm */, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
11388
11389
/**
 * @opcode 0xe8
 *
 * CALL Jv - near relative call.  The displacement width follows the
 * effective operand size; in 64-bit mode a 32-bit displacement is fetched
 * and sign-extended to 64 bits.  All variants are deferred to C
 * implementations that push the return address and adjust RIP.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* 32-bit immediate sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11420
11421
/**
 * @opcode 0xe9
 *
 * JMP Jv - near relative jump.  16-bit operand size uses a 16-bit
 * displacement; 32-bit and 64-bit both use a sign-extended 32-bit
 * displacement (64-bit mode never takes a 64-bit one).
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
            IEM_MC_END();
            break;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
            IEM_MC_END();
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11453
11454
/**
 * @opcode 0xea
 *
 * JMP Ap - direct far jump (ptr16:16 / ptr16:32).  Invalid in 64-bit mode
 * (IEMOP_HLP_NO_64BIT).  Decodes offset then selector and defers to the
 * far-jump C implementation.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
11473
11474
/**
 * @opcode 0xeb
 *
 * JMP Jb - short relative jump with a signed 8-bit displacement.
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    IEM_MC_END();
}
11489
11490
/** Opcode 0xec.
 * IN AL,DX - read one byte from the I/O port in DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
11498
11499
/** Opcode 0xed.
 * IN eAX,DX - read a word or dword (per effective operand size) from the
 * I/O port in DX into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
11507
11508
/** Opcode 0xee.
 * OUT DX,AL - write AL to the I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
11516
11517
/** Opcode 0xef.
 * OUT DX,eAX - write AX/EAX (per effective operand size) to the I/O port
 * in DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
11525
11526
/**
 * @opcode 0xf0
 *
 * LOCK prefix: records IEM_OP_PRF_LOCK (unless fDisregardLock is set) and
 * continues decoding with the next opcode byte via the one-byte map.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    if (!pVCpu->iem.s.fDisregardLock)
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
11539
11540
/**
 * @opcode 0xf1
 *
 * INT1/ICEBP: raises \#DB through the common software-interrupt C
 * implementation with the IEMINT_INT1 flavor.
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    /** @todo Does not generate \#UD on 286, or so they say...  Was allegedly a
     * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
     * LOADALL memo.  Needs some testing. */
    IEMOP_HLP_MIN_386();
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
}
11554
11555
/**
 * @opcode 0xf2
 *
 * REPNE/REPNZ prefix: records IEM_OP_PRF_REPNZ (clearing any earlier REPZ),
 * selects index 3 in the 4-entry SSE/AVX opcode tables, and continues
 * decoding with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
11573
11574
/**
 * @opcode 0xf3
 *
 * REPE/REPZ prefix: records IEM_OP_PRF_REPZ (clearing any earlier REPNZ),
 * selects index 2 in the 4-entry SSE/AVX opcode tables, and continues
 * decoding with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
11592
11593
/**
 * @opcode 0xf4
 *
 * HLT - halt the processor; deferred to the C implementation (which also
 * handles the privilege check).
 */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_MNEMONIC(hlt, "hlt");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
11603
11604
/**
 * @opcode 0xf5
 *
 * CMC - complement the carry flag; no other flags are touched.
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11617
11618
/**
 * Body of 'inc/dec/not/neg Eb' (groups 3 and 5, byte operand).
 *
 * Handles the register form and both the plain and LOCKed memory forms.
 *
 * @param   a_bRm           The ModR/M byte.
 * @param   a_fnNormalU8    Assembly worker for the non-atomic u8 operation.
 * @param   a_fnLockedU8    Assembly worker used when a LOCK prefix is present.
 */
#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        /* register access */ \
        IEMOP_HLP_DONE_DECODING(); \
        IEM_MC_BEGIN(2, 0); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG(uint32_t *, pEFlags, 1); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory access. */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(2, 2); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(2, 2); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
11677
11678
/**
 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
 *
 * Register form and non-LOCKed memory form only.  NOTE: this macro ends
 * inside the locked-memory 'else' branch with two scopes deliberately left
 * open; it MUST be followed by IEMOP_BODY_UNARY_Ev_LOCKED, which supplies
 * the locked workers and closes those scopes.
 *
 * @param   a_fnNormalU16   Assembly worker for the 16-bit operation.
 * @param   a_fnNormalU32   Assembly worker for the 32-bit operation.
 * @param   a_fnNormalU64   Assembly worker for the 64-bit operation.
 */
#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(2, 0); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(2, 0); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(2, 0); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
11794
/**
 * LOCKed-memory tail for IEMOP_BODY_UNARY_Ev; must directly follow it, as
 * it supplies the locked workers and closes the two scopes the former macro
 * leaves open.
 *
 * @param   a_fnLockedU16   Atomic assembly worker for the 16-bit operation.
 * @param   a_fnLockedU32   Atomic assembly worker for the 32-bit operation.
 * @param   a_fnLockedU64   Atomic assembly worker for the 64-bit operation.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
11854
11855
/**
 * @opmaps grp3_f6
 * @opcode /0
 *
 * TEST Eb,Ib - AND the byte operand with an immediate, updating EFLAGS only
 * (no result is written back, hence the read-only memory mapping below).
 * @todo also /1
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* cbImm=1: the immediate follows the ModR/M bytes and affects RIP-relative addressing. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Read-only mapping: TEST never writes the operand. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11905
11906
/** Opcode 0xf6 /4, /5, /6 and /7.
 * Common worker for the byte-sized MUL/IMUL/DIV/IDIV forms of group 3.
 * The worker operates on AX (implicit destination) and the r/m8 operand;
 * a non-zero return from the assembly worker raises \#DE.
 *
 * @param   bRm     The ModR/M byte.
 * @param   pfnU8   The u8 assembly worker to invoke.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc != 0 means divide error (divide by zero / quotient overflow). */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc != 0 means divide error (divide by zero / quotient overflow). */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
11957
11958
/** Opcode 0xf7 /4, /5, /6 and /7.
 *
 * Common worker for the word/dword/qword MUL, IMUL, DIV and IDIV forms.  The
 * implicit destination is the xAX:xDX register pair; the r/m operand supplies
 * the source.  The assembly helper selected from @a pImpl returns zero on
 * success, non-zero to request a \#DE.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        /* NOTE(review): done-decoding is invoked again inside each case below;
           redundant but harmless duplication. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit GPR writes zero the upper halves of RAX/RDX on success. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                /* No immediate operand, hence cbImm = 0. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit GPR writes zero the upper halves of RAX/RDX on success. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12142
12143
12144/**
12145 * @opmaps grp3_f6
12146 * @opcode /2
12147 */
12148FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
12149{
12150 IEMOP_MNEMONIC(not_Eb, "not Eb");
12151 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
12152}
12153
12154
12155/**
12156 * @opmaps grp3_f6
12157 * @opcode /3
12158 */
12159FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
12160{
12161 IEMOP_MNEMONIC(net_Eb, "neg Eb");
12162 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
12163}
12164
12165
12166/**
12167 * @opcode 0xf6
12168 */
12169FNIEMOP_DEF(iemOp_Grp3_Eb)
12170{
12171 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12172 switch (IEM_GET_MODRM_REG_8(bRm))
12173 {
12174 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
12175 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
12176 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
12177 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
12178 case 4:
12179 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
12180 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12181 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
12182 case 5:
12183 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
12184 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12185 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
12186 case 6:
12187 IEMOP_MNEMONIC(div_Eb, "div Eb");
12188 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
12189 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
12190 case 7:
12191 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
12192 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
12193 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
12194 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12195 }
12196}
12197
12198
/** Opcode 0xf7 /0.
 *
 * TEST Ev,Iv - ANDs the r/m operand with the immediate and updates EFLAGS
 * without writing back a result.  In 64-bit mode the immediate is a
 * sign-extended 32-bit value.  AF is architecturally undefined.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                /* Immediate is imm32 sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm = 2: a word immediate follows the ModRM bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm = 4: a dword immediate follows the ModRM bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm = 4: the qword form still only has an imm32 (sign-extended). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12338
12339
/** Opcode 0xf7 /2.
 * NOT r/m16/32/64; plain and LOCK-prefixed bodies supplied by the macros. */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_not_u16, iemAImpl_not_u32, iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
12347
12348
/** Opcode 0xf7 /3.
 * NEG r/m16/32/64; plain and LOCK-prefixed bodies supplied by the macros. */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_neg_u16, iemAImpl_neg_u32, iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
12356
12357
12358/**
12359 * @opcode 0xf7
12360 */
12361FNIEMOP_DEF(iemOp_Grp3_Ev)
12362{
12363 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12364 switch (IEM_GET_MODRM_REG_8(bRm))
12365 {
12366 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
12367 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
12368 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
12369 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
12370 case 4:
12371 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
12372 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12373 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
12374 case 5:
12375 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
12376 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12377 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
12378 case 6:
12379 IEMOP_MNEMONIC(div_Ev, "div Ev");
12380 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
12381 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
12382 case 7:
12383 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
12384 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
12385 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
12386 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12387 }
12388}
12389
12390
12391/**
12392 * @opcode 0xf8
12393 */
12394FNIEMOP_DEF(iemOp_clc)
12395{
12396 IEMOP_MNEMONIC(clc, "clc");
12397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12398 IEM_MC_BEGIN(0, 0);
12399 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
12400 IEM_MC_ADVANCE_RIP_AND_FINISH();
12401 IEM_MC_END();
12402}
12403
12404
12405/**
12406 * @opcode 0xf9
12407 */
12408FNIEMOP_DEF(iemOp_stc)
12409{
12410 IEMOP_MNEMONIC(stc, "stc");
12411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12412 IEM_MC_BEGIN(0, 0);
12413 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
12414 IEM_MC_ADVANCE_RIP_AND_FINISH();
12415 IEM_MC_END();
12416}
12417
12418
12419/**
12420 * @opcode 0xfa
12421 */
12422FNIEMOP_DEF(iemOp_cli)
12423{
12424 IEMOP_MNEMONIC(cli, "cli");
12425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12426 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
12427}
12428
12429
/**
 * @opcode 0xfb
 *
 * STI - deferred to a C implementation (privilege/IOPL and interrupt
 * shadow handling live there).
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
12436
12437
12438/**
12439 * @opcode 0xfc
12440 */
12441FNIEMOP_DEF(iemOp_cld)
12442{
12443 IEMOP_MNEMONIC(cld, "cld");
12444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12445 IEM_MC_BEGIN(0, 0);
12446 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
12447 IEM_MC_ADVANCE_RIP_AND_FINISH();
12448 IEM_MC_END();
12449}
12450
12451
12452/**
12453 * @opcode 0xfd
12454 */
12455FNIEMOP_DEF(iemOp_std)
12456{
12457 IEMOP_MNEMONIC(std, "std");
12458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12459 IEM_MC_BEGIN(0, 0);
12460 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
12461 IEM_MC_ADVANCE_RIP_AND_FINISH();
12462 IEM_MC_END();
12463}
12464
12465
12466/**
12467 * @opmaps grp4
12468 * @opcode /0
12469 */
12470FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
12471{
12472 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
12473 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
12474}
12475
12476
12477/**
12478 * @opmaps grp4
12479 * @opcode /1
12480 */
12481FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
12482{
12483 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
12484 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
12485}
12486
12487
12488/**
12489 * @opcode 0xfe
12490 */
12491FNIEMOP_DEF(iemOp_Grp4)
12492{
12493 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12494 switch (IEM_GET_MODRM_REG_8(bRm))
12495 {
12496 case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
12497 case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
12498 default:
12499 /** @todo is the eff-addr decoded? */
12500 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
12501 return IEMOP_RAISE_INVALID_OPCODE();
12502 }
12503}
12504
/** Opcode 0xff /0.
 * INC r/m16/32/64; plain and LOCK-prefixed bodies supplied by the macros. */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_inc_u16, iemAImpl_inc_u32, iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
12512
12513
/** Opcode 0xff /1.
 * DEC r/m16/32/64; plain and LOCK-prefixed bodies supplied by the macros. */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_dec_u16, iemAImpl_dec_u32, iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
12521
12522
12523/**
12524 * Opcode 0xff /2.
12525 * @param bRm The RM byte.
12526 */
12527FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
12528{
12529 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
12530 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12531
12532 if (IEM_IS_MODRM_REG_MODE(bRm))
12533 {
12534 /* The new RIP is taken from a register. */
12535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12536 switch (pVCpu->iem.s.enmEffOpSize)
12537 {
12538 case IEMMODE_16BIT:
12539 IEM_MC_BEGIN(1, 0);
12540 IEM_MC_ARG(uint16_t, u16Target, 0);
12541 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
12542 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
12543 IEM_MC_END();
12544 break;
12545
12546 case IEMMODE_32BIT:
12547 IEM_MC_BEGIN(1, 0);
12548 IEM_MC_ARG(uint32_t, u32Target, 0);
12549 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
12550 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
12551 IEM_MC_END();
12552 break;
12553
12554 case IEMMODE_64BIT:
12555 IEM_MC_BEGIN(1, 0);
12556 IEM_MC_ARG(uint64_t, u64Target, 0);
12557 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
12558 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
12559 IEM_MC_END();
12560 break;
12561
12562 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12563 }
12564 }
12565 else
12566 {
12567 /* The new RIP is taken from a register. */
12568 switch (pVCpu->iem.s.enmEffOpSize)
12569 {
12570 case IEMMODE_16BIT:
12571 IEM_MC_BEGIN(1, 1);
12572 IEM_MC_ARG(uint16_t, u16Target, 0);
12573 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12574 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12576 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12577 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
12578 IEM_MC_END();
12579 break;
12580
12581 case IEMMODE_32BIT:
12582 IEM_MC_BEGIN(1, 1);
12583 IEM_MC_ARG(uint32_t, u32Target, 0);
12584 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12585 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12587 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12588 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
12589 IEM_MC_END();
12590 break;
12591
12592 case IEMMODE_64BIT:
12593 IEM_MC_BEGIN(1, 1);
12594 IEM_MC_ARG(uint64_t, u64Target, 0);
12595 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12596 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12598 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12599 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
12600 IEM_MC_END();
12601 break;
12602
12603 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12604 }
12605 }
12606}
12607
/**
 * Common worker for grp5 far CALL/JMP through a memory far pointer (Ep).
 *
 * Loads sel:offset from memory (offset first, selector at the following
 * word) and defers the actual far branch to @a pfnCImpl.  A register
 * operand is invalid and raises \#UD.
 */
FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, PFNIEMCIMPLFARBRANCH, pfnCImpl)
{
    /* Registers? How?? */
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(bRm)))
    { /* likely */ }
    else
        return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */

    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */
    /** @todo what does VIA do? */
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu))
    { /* likely */ }
    else
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT;

    /* Far pointer loaded from memory. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t, u16Sel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Offset word first, selector at +2. */
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t, u16Sel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Offset dword first, selector at +4. */
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            /* Only reachable on Intel-style CPUs, see the REX.W handling above. */
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu));
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t, u16Sel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Offset qword first, selector at +8. */
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12672
12673
12674/**
12675 * Opcode 0xff /3.
12676 * @param bRm The RM byte.
12677 */
12678FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
12679{
12680 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
12681 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
12682}
12683
12684
12685/**
12686 * Opcode 0xff /4.
12687 * @param bRm The RM byte.
12688 */
12689FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
12690{
12691 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
12692 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12693
12694 if (IEM_IS_MODRM_REG_MODE(bRm))
12695 {
12696 /* The new RIP is taken from a register. */
12697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12698 switch (pVCpu->iem.s.enmEffOpSize)
12699 {
12700 case IEMMODE_16BIT:
12701 IEM_MC_BEGIN(0, 1);
12702 IEM_MC_LOCAL(uint16_t, u16Target);
12703 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
12704 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
12705 IEM_MC_END();
12706 break;
12707
12708 case IEMMODE_32BIT:
12709 IEM_MC_BEGIN(0, 1);
12710 IEM_MC_LOCAL(uint32_t, u32Target);
12711 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
12712 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
12713 IEM_MC_END();
12714 break;
12715
12716 case IEMMODE_64BIT:
12717 IEM_MC_BEGIN(0, 1);
12718 IEM_MC_LOCAL(uint64_t, u64Target);
12719 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
12720 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
12721 IEM_MC_END();
12722 break;
12723
12724 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12725 }
12726 }
12727 else
12728 {
12729 /* The new RIP is taken from a memory location. */
12730 switch (pVCpu->iem.s.enmEffOpSize)
12731 {
12732 case IEMMODE_16BIT:
12733 IEM_MC_BEGIN(0, 2);
12734 IEM_MC_LOCAL(uint16_t, u16Target);
12735 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12738 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12739 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
12740 IEM_MC_END();
12741 break;
12742
12743 case IEMMODE_32BIT:
12744 IEM_MC_BEGIN(0, 2);
12745 IEM_MC_LOCAL(uint32_t, u32Target);
12746 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12747 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12749 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12750 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
12751 IEM_MC_END();
12752 break;
12753
12754 case IEMMODE_64BIT:
12755 IEM_MC_BEGIN(0, 2);
12756 IEM_MC_LOCAL(uint64_t, u64Target);
12757 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12758 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12760 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12761 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
12762 IEM_MC_END();
12763 break;
12764
12765 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12766 }
12767 }
12768}
12769
12770
12771/**
12772 * Opcode 0xff /5.
12773 * @param bRm The RM byte.
12774 */
12775FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
12776{
12777 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
12778 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
12779}
12780
12781
12782/**
12783 * Opcode 0xff /6.
12784 * @param bRm The RM byte.
12785 */
12786FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
12787{
12788 IEMOP_MNEMONIC(push_Ev, "push Ev");
12789
12790 /* Registers are handled by a common worker. */
12791 if (IEM_IS_MODRM_REG_MODE(bRm))
12792 return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
12793
12794 /* Memory we do here. */
12795 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12796 switch (pVCpu->iem.s.enmEffOpSize)
12797 {
12798 case IEMMODE_16BIT:
12799 IEM_MC_BEGIN(0, 2);
12800 IEM_MC_LOCAL(uint16_t, u16Src);
12801 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12802 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12804 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12805 IEM_MC_PUSH_U16(u16Src);
12806 IEM_MC_ADVANCE_RIP_AND_FINISH();
12807 IEM_MC_END();
12808 break;
12809
12810 case IEMMODE_32BIT:
12811 IEM_MC_BEGIN(0, 2);
12812 IEM_MC_LOCAL(uint32_t, u32Src);
12813 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12814 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12816 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12817 IEM_MC_PUSH_U32(u32Src);
12818 IEM_MC_ADVANCE_RIP_AND_FINISH();
12819 IEM_MC_END();
12820 break;
12821
12822 case IEMMODE_64BIT:
12823 IEM_MC_BEGIN(0, 2);
12824 IEM_MC_LOCAL(uint64_t, u64Src);
12825 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12826 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12827 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12828 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12829 IEM_MC_PUSH_U64(u64Src);
12830 IEM_MC_ADVANCE_RIP_AND_FINISH();
12831 IEM_MC_END();
12832 break;
12833
12834 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12835 }
12836}
12837
12838
12839/**
12840 * @opcode 0xff
12841 */
12842FNIEMOP_DEF(iemOp_Grp5)
12843{
12844 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12845 switch (IEM_GET_MODRM_REG_8(bRm))
12846 {
12847 case 0:
12848 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
12849 case 1:
12850 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
12851 case 2:
12852 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
12853 case 3:
12854 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
12855 case 4:
12856 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
12857 case 5:
12858 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
12859 case 6:
12860 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
12861 case 7:
12862 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
12863 return IEMOP_RAISE_INVALID_OPCODE();
12864 }
12865 AssertFailedReturn(VERR_IEM_IPE_3);
12866}
12867
12868
12869
/**
 * Dispatch table for the one-byte opcode space.
 *
 * Indexed directly by the first opcode byte (0x00..0xff); each entry is the
 * decoder/emulation worker for that opcode.  Some entries are escapes into
 * other maps rather than instructions in their own right: 0x0f goes to
 * iemOp_2byteEscape (two-byte opcode map) and 0xd8..0xdf go to the FPU
 * escape handlers iemOp_EscF0..iemOp_EscF7.  Prefix bytes (segment
 * overrides, 0x66/0x67 size overrides, 0xf0 lock, 0xf2/0xf3 rep) likewise
 * have their own handlers in this table.
 *
 * Declared extern at the top of this file so other translation units can
 * forward-reference it.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex3,  iemOp_lds_Gv_Mp__vex2,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_in_eAX_DX,        iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
12937
12938
12939/** @} */
12940
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette