VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 99984

Last change on this file since 99984 was 99984, checked in by vboxsync, 20 months ago

VMM/IEM: Fixed a few places in IEMAllCImpl.cpp and IEMAllCImplSvmInstr.cpp where decoder state was used directly instead of being passed as arguments. bugref:10369

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 439.9 KB
Line 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 99984 2023-05-26 01:20:46Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
/**
 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
 * memory/register as the destination.
 *
 * Decodes the ModR/M byte and fully handles the register-destination form.
 * For the memory-destination form it handles the no-LOCK-prefix case and
 * leaves an open 'else {' scope, which MUST be closed by appending either
 * IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED.
 *
 * @param   a_fnNormalU8    Assembly helper for the non-LOCKed operation.
 * @param   a_fRW           IEM_ACCESS_XXX flags used when mapping the memory
 *                          destination operand.
 */
#define IEMOP_BODY_BINARY_rm_r8(a_fnNormalU8, a_fRW) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        \
        IEM_MC_BEGIN(3, 0); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 2); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG(uint8_t, u8Src, 1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP(pu8Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, a_fRW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
118
/**
 * Closes the scopes opened by IEMOP_BODY_BINARY_rm_r8 for instructions that
 * do not permit a LOCK prefix: raises \#UD when one is present.
 */
#define IEMOP_BODY_BINARY_rm_r8_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
        } \
    } \
    (void)0
125
/**
 * Closes the scopes opened by IEMOP_BODY_BINARY_rm_r8, emitting the LOCKed
 * (atomic read-modify-write) memory-destination variant.
 *
 * @param   a_fnLockedU8    Assembly helper for the LOCKed operation.
 */
#define IEMOP_BODY_BINARY_rm_r8_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 2); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG(uint8_t, u8Src, 1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
147
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination.
 *
 * The register destination cannot be a memory operand, so no LOCK prefix is
 * ever legal here and the body is complete by itself (no trailing macro).
 *
 * @param   a_fnNormalU8    Assembly helper for the operation.
 */
#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_BEGIN(3, 0); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        IEM_MC_BEGIN(3, 1); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
196
197
/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Switches on the effective operand size (16/32/64-bit).  The register
 * destination form and the unlocked memory form are handled here; an open
 * 'else {' scope is left for the LOCK-prefix case and MUST be closed by
 * appending IEMOP_BODY_BINARY_rm_rv_NO_LOCK or IEMOP_BODY_BINARY_rm_rv_LOCKED.
 *
 * @param   a_fnNormalU16   Assembly helper, 16-bit operand size.
 * @param   a_fnNormalU32   Assembly helper, 32-bit operand size.
 * @param   a_fnNormalU64   Assembly helper, 64-bit operand size.
 * @param   a_fRW           IEM_ACCESS_XXX flags for mapping the memory
 *                          destination; also used to detect read-only ops
 *                          (CMP/TEST) which must not clear the high dword.
 */
#define IEMOP_BODY_BINARY_rm_rv(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t, u16Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t, u32Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                if ((a_fRW) == IEM_ACCESS_DATA_RW) /* not TEST and CMP */ \
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t, u64Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
340
/**
 * Closes the scopes opened by IEMOP_BODY_BINARY_rm_rv for instructions that
 * do not permit a LOCK prefix: raises \#UD when one is present.
 */
#define IEMOP_BODY_BINARY_rm_rv_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
        } \
    } \
    (void)0
347
/**
 * Closes the scopes opened by IEMOP_BODY_BINARY_rm_rv, emitting the LOCKed
 * (atomic read-modify-write) memory-destination variants for all three
 * effective operand sizes.
 *
 * @param   a_fnLockedU16   Assembly helper for the LOCKed 16-bit operation.
 * @param   a_fnLockedU32   Assembly helper for the LOCKed 32-bit operation.
 * @param   a_fnLockedU64   Assembly helper for the LOCKed 64-bit operation.
 */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
416
417
/**
 * Body for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * Complete body; note the deliberate lack of a trailing semicolon after
 * IEM_MC_END() - the invoking statement supplies it.
 *
 * @param   a_fnNormalU8    Assembly helper for the operation.
 */
#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    \
    IEM_MC_BEGIN(3, 0); \
    IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
    IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    \
    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
    \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
437
/**
 * Body for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * The 64-bit form sign-extends a 32-bit immediate (Iz), per the x86 rules.
 * No 'break' statements: each case leaves via IEM_MC_ADVANCE_RIP_AND_FINISH /
 * IEM_MC_END, so control never reaches the next case label.
 *
 * @param   a_fnNormalU16       Assembly helper, 16-bit operand size.
 * @param   a_fnNormalU32       Assembly helper, 32-bit operand size.
 * @param   a_fnNormalU64       Assembly helper, 64-bit operand size.
 * @param   a_fModifiesDstReg   Non-zero when the op writes the destination
 *                              (clears the high dword of RAX in 32-bit mode);
 *                              zero for CMP/TEST style operations.
 */
#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
            IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            \
            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
            IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            \
            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
            \
            if (a_fModifiesDstReg) \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
            IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            \
            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
504
505
506
507/* Instruction specification format - work in progress: */
508
509/**
510 * @opcode 0x00
511 * @opmnemonic add
512 * @op1 rm:Eb
513 * @op2 reg:Gb
514 * @opmaps one
515 * @openc ModR/M
516 * @opflmodify cf,pf,af,zf,sf,of
517 * @ophints harmless ignores_op_sizes
518 * @opstats add_Eb_Gb
519 * @opgroup og_gen_arith_bin
520 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
521 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
522 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
523 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
524 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Unlocked body first; the _LOCKED macro closes its open scope with the atomic variant. */
    IEMOP_BODY_BINARY_rm_r8(       iemAImpl_add_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_add_u8_locked);
}
531
532
533/**
534 * @opcode 0x01
535 * @opgroup og_gen_arith_bin
536 * @opflmodify cf,pf,af,zf,sf,of
537 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
538 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
539 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
540 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
541 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Unlocked body first; the _LOCKED macro closes its open scope with the atomic variants. */
    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
548
549
550/**
551 * @opcode 0x02
552 * @opgroup og_gen_arith_bin
553 * @opflmodify cf,pf,af,zf,sf,of
554 * @opcopytests iemOp_add_Eb_Gb
555 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register destination - LOCK is never legal, so the body is self-contained. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
}
561
562
563/**
564 * @opcode 0x03
565 * @opgroup og_gen_arith_bin
566 * @opflmodify cf,pf,af,zf,sf,of
567 * @opcopytests iemOp_add_Ev_Gv
568 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* IEMOP_BODY_BINARY_rv_rm is defined elsewhere in this file; last arg 1 = modifies the destination register. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
574
575
576/**
577 * @opcode 0x04
578 * @opgroup og_gen_arith_bin
579 * @opflmodify cf,pf,af,zf,sf,of
580 * @opcopytests iemOp_add_Eb_Gb
581 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
}
587
588
589/**
590 * @opcode 0x05
591 * @opgroup og_gen_arith_bin
592 * @opflmodify cf,pf,af,zf,sf,of
593 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
594 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
595 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
596 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
597 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* Last arg 1 = modifies rAX (clears the high dword in 32-bit mode). */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
603
604
605/**
606 * @opcode 0x06
607 * @opgroup og_stack_sreg
608 */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* Invalid encoding in long mode. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
615
616
617/**
618 * @opcode 0x07
619 * @opgroup og_stack_sreg
620 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* Invalid encoding in long mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
628
629
630/**
631 * @opcode 0x08
632 * @opgroup og_gen_arith_bin
633 * @opflmodify cf,pf,af,zf,sf,of
634 * @opflundef af
635 * @opflclear of,cf
636 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
637 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
638 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
639 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
640 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after OR. */
    IEMOP_BODY_BINARY_rm_r8(       iemAImpl_or_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_or_u8_locked);
}
648
649
/**
 * @opcode 0x09
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after OR. */
    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
670
671
672/**
673 * @opcode 0x0a
674 * @opgroup og_gen_arith_bin
675 * @opflmodify cf,pf,af,zf,sf,of
676 * @opflundef af
677 * @opflclear of,cf
678 * @opcopytests iemOp_or_Eb_Gb
679 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after OR. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
}
686
687
688/**
689 * @opcode 0x0b
690 * @opgroup og_gen_arith_bin
691 * @opflmodify cf,pf,af,zf,sf,of
692 * @opflundef af
693 * @opflclear of,cf
694 * @opcopytests iemOp_or_Ev_Gv
695 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after OR. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
702
703
704/**
705 * @opcode 0x0c
706 * @opgroup og_gen_arith_bin
707 * @opflmodify cf,pf,af,zf,sf,of
708 * @opflundef af
709 * @opflclear of,cf
710 * @opcopytests iemOp_or_Eb_Gb
711 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after OR. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
}
718
719
720/**
721 * @opcode 0x0d
722 * @opgroup og_gen_arith_bin
723 * @opflmodify cf,pf,af,zf,sf,of
724 * @opflundef af
725 * @opflclear of,cf
726 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
727 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
728 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
729 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
730 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
731 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
732 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
733 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after OR. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
740
741
742/**
743 * @opcode 0x0e
744 * @opgroup og_stack_sreg
745 */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* Invalid encoding in long mode. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
752
753
754/**
755 * @opcode 0x0f
756 * @opmnemonic EscTwo0f
757 * @openc two0f
758 * @opdisenum OP_2B_ESC
759 * @ophints harmless
760 * @opgroup og_escapes
761 */
762FNIEMOP_DEF(iemOp_2byteEscape)
763{
764#ifdef VBOX_STRICT
765 /* Sanity check the table the first time around. */
766 static bool s_fTested = false;
767 if (RT_LIKELY(s_fTested)) { /* likely */ }
768 else
769 {
770 s_fTested = true;
771 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
772 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
773 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
774 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
775 }
776#endif
777
778 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
779 {
780 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
781 IEMOP_HLP_MIN_286();
782 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
783 }
784 /* @opdone */
785
786 /*
787 * On the 8086 this is a POP CS instruction.
788 * For the time being we don't specify this this.
789 */
790 IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
791 IEMOP_HLP_NO_64BIT();
792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
793 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
794}
795
796/**
797 * @opcode 0x10
798 * @opgroup og_gen_arith_bin
799 * @opfltest cf
800 * @opflmodify cf,pf,af,zf,sf,of
801 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
802 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
803 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
804 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
805 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
806 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Unlocked body first; the _LOCKED macro closes its open scope with the atomic variant. */
    IEMOP_BODY_BINARY_rm_r8(       iemAImpl_adc_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_adc_u8_locked);
}
813
814
815/**
816 * @opcode 0x11
817 * @opgroup og_gen_arith_bin
818 * @opfltest cf
819 * @opflmodify cf,pf,af,zf,sf,of
820 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
821 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
822 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
823 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
824 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
825 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Unlocked body first; the _LOCKED macro closes its open scope with the atomic variants. */
    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
832
833
834/**
835 * @opcode 0x12
836 * @opgroup og_gen_arith_bin
837 * @opfltest cf
838 * @opflmodify cf,pf,af,zf,sf,of
839 * @opcopytests iemOp_adc_Eb_Gb
840 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
846
847
848/**
849 * @opcode 0x13
850 * @opgroup og_gen_arith_bin
851 * @opfltest cf
852 * @opflmodify cf,pf,af,zf,sf,of
853 * @opcopytests iemOp_adc_Ev_Gv
854 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
860
861
862/**
863 * @opcode 0x14
864 * @opgroup og_gen_arith_bin
865 * @opfltest cf
866 * @opflmodify cf,pf,af,zf,sf,of
867 * @opcopytests iemOp_adc_Eb_Gb
868 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
874
875
876/**
877 * @opcode 0x15
878 * @opgroup og_gen_arith_bin
879 * @opfltest cf
880 * @opflmodify cf,pf,af,zf,sf,of
881 * @opcopytests iemOp_adc_Ev_Gv
882 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
888
889
890/**
891 * @opcode 0x16
892 */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT(); /* Invalid encoding in long mode. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
899
900
/**
 * @opcode 0x17
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* Invalid encoding in long mode. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
914
915
916/**
917 * @opcode 0x18
918 * @opgroup og_gen_arith_bin
919 * @opfltest cf
920 * @opflmodify cf,pf,af,zf,sf,of
921 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Unlocked body first; the _LOCKED macro closes its open scope with the atomic variant. */
    IEMOP_BODY_BINARY_rm_r8(       iemAImpl_sbb_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sbb_u8_locked);
}
928
929
930/**
931 * @opcode 0x19
932 * @opgroup og_gen_arith_bin
933 * @opfltest cf
934 * @opflmodify cf,pf,af,zf,sf,of
935 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Unlocked body first; the _LOCKED macro closes its open scope with the atomic variants. */
    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
942
943
944/**
945 * @opcode 0x1a
946 * @opgroup og_gen_arith_bin
947 * @opfltest cf
948 * @opflmodify cf,pf,af,zf,sf,of
949 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
955
956
957/**
958 * @opcode 0x1b
959 * @opgroup og_gen_arith_bin
960 * @opfltest cf
961 * @opflmodify cf,pf,af,zf,sf,of
962 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
968
969
970/**
971 * @opcode 0x1c
972 * @opgroup og_gen_arith_bin
973 * @opfltest cf
974 * @opflmodify cf,pf,af,zf,sf,of
975 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
981
982
983/**
984 * @opcode 0x1d
985 * @opgroup og_gen_arith_bin
986 * @opfltest cf
987 * @opflmodify cf,pf,af,zf,sf,of
988 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* rAX destination with operand-size dependent immediate (Iz). */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
994
995
996/**
997 * @opcode 0x1e
998 * @opgroup og_stack_sreg
999 */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* push/pop of DS is invalid in 64-bit mode */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1006
1007
1008/**
1009 * @opcode 0x1f
1010 * @opgroup og_stack_sreg
1011 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* invalid in 64-bit mode */
    /* Segment-register loads are deferred to the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1019
1020
1021/**
1022 * @opcode 0x20
1023 * @opgroup og_gen_arith_bin
1024 * @opflmodify cf,pf,af,zf,sf,of
1025 * @opflundef af
1026 * @opflclear of,cf
1027 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND */
    IEMOP_BODY_BINARY_rm_r8( iemAImpl_and_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_and_u8_locked);
}
1035
1036
1037/**
1038 * @opcode 0x21
1039 * @opgroup og_gen_arith_bin
1040 * @opflmodify cf,pf,af,zf,sf,of
1041 * @opflundef af
1042 * @opflclear of,cf
1043 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND */
    IEMOP_BODY_BINARY_rm_rv( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1051
1052
1053/**
1054 * @opcode 0x22
1055 * @opgroup og_gen_arith_bin
1056 * @opflmodify cf,pf,af,zf,sf,of
1057 * @opflundef af
1058 * @opflclear of,cf
1059 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1066
1067
1068/**
1069 * @opcode 0x23
1070 * @opgroup og_gen_arith_bin
1071 * @opflmodify cf,pf,af,zf,sf,of
1072 * @opflundef af
1073 * @opflclear of,cf
1074 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1081
1082
1083/**
1084 * @opcode 0x24
1085 * @opgroup og_gen_arith_bin
1086 * @opflmodify cf,pf,af,zf,sf,of
1087 * @opflundef af
1088 * @opflclear of,cf
1089 */
1090FNIEMOP_DEF(iemOp_and_Al_Ib)
1091{
1092 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1093 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1094 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1095}
1096
1097
1098/**
1099 * @opcode 0x25
1100 * @opgroup og_gen_arith_bin
1101 * @opflmodify cf,pf,af,zf,sf,of
1102 * @opflundef af
1103 * @opflclear of,cf
1104 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1111
1112
1113/**
1114 * @opcode 0x26
1115 * @opmnemonic SEG
1116 * @op1 ES
1117 * @opgroup og_prefix
1118 * @openc prefix
1119 * @opdisenum OP_SEG
1120 * @ophints harmless
1121 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    /* Record the segment-override prefix, then decode the following byte
       via the one-byte opcode map (prefixes recurse into the decoder). */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1131
1132
1133/**
1134 * @opcode 0x27
1135 * @opfltest af,cf
1136 * @opflmodify cf,pf,af,zf,sf,of
1137 * @opflundef of
1138 */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT(); /* DAA is invalid in 64-bit mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    /* BCD adjust is handled by the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
1147
1148
1149/**
1150 * @opcode 0x28
1151 * @opgroup og_gen_arith_bin
1152 * @opflmodify cf,pf,af,zf,sf,of
1153 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Body supplied by macros: normal + LOCKed forms, including the return. */
    IEMOP_BODY_BINARY_rm_r8( iemAImpl_sub_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sub_u8_locked);
}
1160
1161
1162/**
1163 * @opcode 0x29
1164 * @opgroup og_gen_arith_bin
1165 * @opflmodify cf,pf,af,zf,sf,of
1166 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Body supplied by macros: 16/32/64-bit workers, normal + LOCKed forms. */
    IEMOP_BODY_BINARY_rm_rv( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1173
1174
1175/**
1176 * @opcode 0x2a
1177 * @opgroup og_gen_arith_bin
1178 * @opflmodify cf,pf,af,zf,sf,of
1179 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register destination - no LOCKed variant for this direction. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1185
1186
1187/**
1188 * @opcode 0x2b
1189 * @opgroup og_gen_arith_bin
1190 * @opflmodify cf,pf,af,zf,sf,of
1191 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* Final 1: presumably "writes the destination" (cmp passes 0) - see 0x3b. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1197
1198
1199/**
1200 * @opcode 0x2c
1201 * @opgroup og_gen_arith_bin
1202 * @opflmodify cf,pf,af,zf,sf,of
1203 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Fixed AL destination, byte immediate. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1209
1210
1211/**
1212 * @opcode 0x2d
1213 * @opgroup og_gen_arith_bin
1214 * @opflmodify cf,pf,af,zf,sf,of
1215 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* rAX destination, operand-size dependent immediate. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1221
1222
1223/**
1224 * @opcode 0x2e
1225 * @opmnemonic SEG
1226 * @op1 CS
1227 * @opgroup og_prefix
1228 * @openc prefix
1229 * @opdisenum OP_SEG
1230 * @ophints harmless
1231 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    /* Record the CS segment-override prefix and continue decoding. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1241
1242
1243/**
1244 * @opcode 0x2f
1245 * @opfltest af,cf
1246 * @opflmodify cf,pf,af,zf,sf,of
1247 * @opflundef of
1248 */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT(); /* DAS is invalid in 64-bit mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    /* BCD adjust is handled by the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
1257
1258
1259/**
1260 * @opcode 0x30
1261 * @opgroup og_gen_arith_bin
1262 * @opflmodify cf,pf,af,zf,sf,of
1263 * @opflundef af
1264 * @opflclear of,cf
1265 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR */
    IEMOP_BODY_BINARY_rm_r8( iemAImpl_xor_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_xor_u8_locked);
}
1273
1274
1275/**
1276 * @opcode 0x31
1277 * @opgroup og_gen_arith_bin
1278 * @opflmodify cf,pf,af,zf,sf,of
1279 * @opflundef af
1280 * @opflclear of,cf
1281 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR */
    IEMOP_BODY_BINARY_rm_rv( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1289
1290
1291/**
1292 * @opcode 0x32
1293 * @opgroup og_gen_arith_bin
1294 * @opflmodify cf,pf,af,zf,sf,of
1295 * @opflundef af
1296 * @opflclear of,cf
1297 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1304
1305
1306/**
1307 * @opcode 0x33
1308 * @opgroup og_gen_arith_bin
1309 * @opflmodify cf,pf,af,zf,sf,of
1310 * @opflundef af
1311 * @opflclear of,cf
1312 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
}
1319
1320
1321/**
1322 * @opcode 0x34
1323 * @opgroup og_gen_arith_bin
1324 * @opflmodify cf,pf,af,zf,sf,of
1325 * @opflundef af
1326 * @opflclear of,cf
1327 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1334
1335
1336/**
1337 * @opcode 0x35
1338 * @opgroup og_gen_arith_bin
1339 * @opflmodify cf,pf,af,zf,sf,of
1340 * @opflundef af
1341 * @opflclear of,cf
1342 */
1343FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1344{
1345 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1346 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1347 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1348}
1349
1350
1351/**
1352 * @opcode 0x36
1353 * @opmnemonic SEG
1354 * @op1 SS
1355 * @opgroup og_prefix
1356 * @openc prefix
1357 * @opdisenum OP_SEG
1358 * @ophints harmless
1359 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    /* Record the SS segment-override prefix and continue decoding. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1369
1370
1371/**
1372 * @opcode 0x37
1373 * @opfltest af,cf
1374 * @opflmodify cf,pf,af,zf,sf,of
1375 * @opflundef pf,zf,sf,of
1376 * @opgroup og_gen_arith_dec
1377 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1378 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1379 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1380 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1381 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1382 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1383 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1384 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1385 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1386 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1387 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1388 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1389 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1390 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1391 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1392 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1393 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1394 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1395 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1396 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1397 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1398 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1399 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1400 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1401 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1402 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1403 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1404 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1405 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1406 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1407 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1408 */
FNIEMOP_DEF(iemOp_aaa)
{
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT(); /* AAA is invalid in 64-bit mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    /* ASCII adjust is handled by the C implementation; the @optest table
       above captures the Intel/AMD behavioral differences. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aaa);
}
1418
1419
1420/**
1421 * @opcode 0x38
1422 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    /* CMP only reads the destination (IEM_ACCESS_DATA_R) and has no LOCKed form. */
    IEMOP_BODY_BINARY_rm_r8(iemAImpl_cmp_u8, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
1429
1430
1431/**
1432 * @opcode 0x39
1433 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    /* CMP only reads the destination (IEM_ACCESS_DATA_R) and has no LOCKed form. */
    IEMOP_BODY_BINARY_rm_rv(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_rm_rv_NO_LOCK();
}
1440
1441
1442/**
1443 * @opcode 0x3a
1444 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    /* Register "destination" is only read; flags are the sole output. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1450
1451
1452/**
1453 * @opcode 0x3b
1454 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    /* Final 0: presumably "does not write the destination" (writing ops pass 1). */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1460
1461
1462/**
1463 * @opcode 0x3c
1464 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    /* Fixed AL operand, byte immediate; flags-only result. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1470
1471
1472/**
1473 * @opcode 0x3d
1474 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    /* Final 0: rAX is not written, only compared (flags-only result). */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1480
1481
1482/**
1483 * @opcode 0x3e
1484 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    /* Record the DS segment-override prefix and continue decoding. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1494
1495
1496/**
1497 * @opcode 0x3f
1498 * @opfltest af,cf
1499 * @opflmodify cf,pf,af,zf,sf,of
1500 * @opflundef pf,zf,sf,of
1501 * @opgroup og_gen_arith_dec
1502 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1503 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1504 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1505 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1506 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1507 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1508 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1509 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1510 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1511 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1512 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1513 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1514 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1515 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1516 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1517 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1518 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1519 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1520 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1521 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1522 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1523 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1524 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1525 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1526 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1527 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1528 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1529 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1530 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1531 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1532 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1533 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1534 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1535 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1536 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1537 */
1538FNIEMOP_DEF(iemOp_aas)
1539{
1540 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1541 IEMOP_HLP_NO_64BIT();
1542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1543 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1544
1545 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aas);
1546}
1547
1548
1549/**
1550 * Common 'inc/dec register' helper.
1551 *
1552 * Not for 64-bit code, only for what became the rex prefixes.
1553 */
/*
 * Emits the complete body for the 0x40..0x4f inc/dec forms when not decoded
 * as REX prefixes: dispatches on the effective operand size (16/32-bit only,
 * per the "not for 64-bit code" note above), takes references to general
 * register @a a_iReg and EFLAGS, and calls the given assembly worker.
 */
#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(2, 0); \
            IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
            IEM_MC_ARG(uint32_t *, pEFlags, 1); \
            IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(2, 0); \
            IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
            IEM_MC_ARG(uint32_t *, pEFlags, 1); \
            IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
            /* 32-bit writes clear the upper half of the 64-bit register. */ \
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
1583
1584/**
1585 * @opcode 0x40
1586 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* Plain REX (0x40): no W/R/X/B bits set; continue decoding. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1604
1605
1606/**
1607 * @opcode 0x41
1608 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.B (0x41): extends the ModRM r/m / base / opcode register field. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1627
1628
1629/**
1630 * @opcode 0x42
1631 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.X (0x42): extends the SIB index register field. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1650
1651
1652
1653/**
1654 * @opcode 0x43
1655 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.BX (0x43): both the B and X extension bits. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1675
1676
1677/**
1678 * @opcode 0x44
1679 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.R (0x44): extends the ModRM reg field. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1698
1699
1700/**
1701 * @opcode 0x45
1702 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RB (0x45). */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1722
1723
1724/**
1725 * @opcode 0x46
1726 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RX (0x46). */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
1746
1747
1748/**
1749 * @opcode 0x47
1750 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RBX (0x47): R, B and X extension bits all set. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
1771
1772
1773/**
1774 * @opcode 0x48
1775 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.W (0x48): 64-bit operand size - recalc the effective op size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
}
1794
1795
1796/**
1797 * @opcode 0x49
1798 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.BW (0x49). */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
}
1818
1819
1820/**
1821 * @opcode 0x4a
1822 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.XW (0x4a). */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
}
1842
1843
1844/**
1845 * @opcode 0x4b
1846 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.BXW (0x4b). */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
}
1867
1868
1869/**
1870 * @opcode 0x4c
1871 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RW (0x4c). */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
}
1891
1892
1893/**
1894 * @opcode 0x4d
1895 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RBW (0x4d). */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB   = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
}
1916
1917
1918/**
1919 * @opcode 0x4e
1920 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RXW (0x4e). */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
}
1941
1942
1943/**
1944 * @opcode 0x4f
1945 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RBXW (0x4f): all four extension bits set. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
}
1967
1968
1969/**
1970 * Common 'push register' helper.
1971 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* In 64-bit mode REX.B extends the register number, and push defaults
           to a 64-bit operand; a 66h prefix selects 16-bit (never 32-bit). */
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* Fetch the register value at the selected width and push it. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2014
2015
2016/**
2017 * @opcode 0x50
2018 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
2024
2025
2026/**
2027 * @opcode 0x51
2028 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
2034
2035
2036/**
2037 * @opcode 0x52
2038 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
2044
2045
2046/**
2047 * @opcode 0x53
2048 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
2054
2055
/**
 * @opcode 0x54
 *
 * Push rSP.  The 8086/8088 pushes the value SP has *after* the decrement
 * (i.e. SP - 2), whereas 80186 and later push the original value.
 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        /* Emulate the 8086 quirk by pushing SP - 2.
           NOTE(review): IEM_MC_ADVANCE_RIP_AND_FINISH appears to complete the
           instruction inside this block, so the common path below should not
           also run for 8086 - confirm against the MC macro definitions. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
2074
2075
/**
 * @opcode 0x55
 *
 * Push rBP (REX.B selects r13 in 64-bit mode via the common helper).
 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
2084
2085
/**
 * @opcode 0x56
 *
 * Push rSI (REX.B selects r14 in 64-bit mode via the common helper).
 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
2094
2095
/**
 * @opcode 0x57
 *
 * Push rDI (REX.B selects r15 in 64-bit mode via the common helper).
 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
2104
2105
/**
 * Common 'pop register' helper.
 *
 * Pops the top of the stack into the general purpose register @a iReg at the
 * current effective operand size.  Shared by the 0x58..0x5f opcode handlers;
 * the POP rSP special case is handled separately in the 0x5c handler.
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;                 /* REX.B extends the register index (r8..r15). */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;  /* Pop defaults to a 64-bit operand size in long mode; */
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
        /* only the 0x66 prefix can shrink it to 16 bits. */
    }

    /* Pop directly into a reference to the destination register. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2152
2153
/**
 * @opcode 0x58
 *
 * Pop into rAX (REX.B selects r8 in 64-bit mode via the common helper).
 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
2162
2163
/**
 * @opcode 0x59
 *
 * Pop into rCX (REX.B selects r9 in 64-bit mode via the common helper).
 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
2172
2173
/**
 * @opcode 0x5a
 *
 * Pop into rDX (REX.B selects r10 in 64-bit mode via the common helper).
 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
2182
2183
/**
 * @opcode 0x5b
 *
 * Pop into rBX (REX.B selects r11 in 64-bit mode via the common helper).
 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
2192
2193
/**
 * @opcode 0x5c
 *
 * Pop into rSP.  Special cased because the pop must not be done through a
 * register reference: the popped value is read first and only then stored to
 * SP, so the stack-pointer update and the destination write don't clash.
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        if (pVCpu->iem.s.uRexB)
            /* With REX.B this is really 'pop r12', so the common helper applies. */
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;  /* Pop defaults to 64-bit operand size in long mode. */
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2243
2244
/**
 * @opcode 0x5d
 *
 * Pop into rBP (REX.B selects r13 in 64-bit mode via the common helper).
 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
2253
2254
/**
 * @opcode 0x5e
 *
 * Pop into rSI (REX.B selects r14 in 64-bit mode via the common helper).
 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
2263
2264
/**
 * @opcode 0x5f
 *
 * Pop into rDI (REX.B selects r15 in 64-bit mode via the common helper).
 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
2273
2274
/**
 * @opcode 0x60
 *
 * PUSHA/PUSHAD - push all general registers.  80186+, not available in
 * 64-bit mode; deferred to a C implementation for the multi-push work.
 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
2288
2289
/**
 * @opcode 0x61
 *
 * POPA/POPAD in legacy and compatibility modes; in 64-bit mode the byte is
 * the MVEX prefix (Knights Corner), which is not supported and raises \#UD.
 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
2309
2310
2311/**
2312 * @opcode 0x62
2313 * @opmnemonic bound
2314 * @op1 Gv_RO
2315 * @op2 Ma
2316 * @opmincpu 80186
2317 * @ophints harmless x86_invalid_64
2318 * @optest op1=0 op2=0 ->
2319 * @optest op1=1 op2=0 -> value.xcpt=5
2320 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2321 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2322 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2323 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2324 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2325 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2326 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2327 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2328 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2329 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2330 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2331 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2332 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2333 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2334 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2335 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2336 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2337 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2338 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2339 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2340 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2341 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2342 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2343 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2344 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2345 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2346 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2347 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2348 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2349 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2350 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2351 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2352 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2353 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2354 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2355 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2356 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2357 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2358 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2359 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2360 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2361 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2362 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restrict wrt addressable registers. */
    uint8_t bRm;
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /* MOD != 3: this really is BOUND.  Fetch the index register and
               the two bounds from memory and defer the range check (and
               potential #BR) to the C implementation. */
            /** @todo testcase: check that there are two memory accesses involved. Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        /* MOD == 3 in 16/32-bit mode: candidate EVEX prefix, only valid when
           the guest CPU has AVX-512 foundation support. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
    {
        /* 64-bit mode: 0x62 is always the EVEX prefix (BOUND is invalid). */
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* Consume the remaining two EVEX payload bytes; decoding of the prefixed
       instruction itself is not implemented yet. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2450
2451
/** Opcode 0x63 - non-64-bit modes.
 *
 * ARPL Ew,Gw - adjust the RPL field of the destination selector to be at
 * least that of the source.  80286+, invalid in real and V86 mode.  The
 * actual RPL adjustment and ZF update happen in iemAImpl_arpl. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: reference the register directly. */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map it read-write, let the assembly helper
           modify it in place, then commit both the memory and EFLAGS. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2499
2500
/**
 * @opcode 0x63
 *
 * MOVSXD Gv,Ev - 64-bit mode only (caller branches on CPU mode).
 *
 * @note  This is a weird one. It works like a regular move instruction if
 *        REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo  This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register: sign-extend the 32-bit source into the
             * 64-bit destination.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        /* The plain-move (no REX.W) variant isn't implemented yet; see @note above. */
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
2548
2549
/**
 * @opcode 0x64
 * @opmnemonic segfs
 * @opmincpu 80386
 * @opgroup og_prefixes
 *
 * FS segment-override prefix: record it and recurse into the one-byte
 * opcode table for the next byte.
 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2567
2568
/**
 * @opcode 0x65
 * @opmnemonic seggs
 * @opmincpu 80386
 * @opgroup og_prefixes
 *
 * GS segment-override prefix: record it and recurse into the one-byte
 * opcode table for the next byte.
 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2586
2587
/**
 * @opcode 0x66
 * @opmnemonic opsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 *
 * Operand-size override prefix: flip the effective operand size, then
 * recurse into the one-byte opcode table for the next byte.
 */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2612
2613
/**
 * @opcode 0x67
 * @opmnemonic addrsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 *
 * Address-size override prefix: toggle the effective address mode relative
 * to the default (16<->32 in legacy modes, 64->32 in long mode), then
 * recurse into the one-byte opcode table for the next byte.
 */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2639
2640
/**
 * @opcode 0x68
 *
 * PUSH Iz - push a word/dword immediate (sign-extended dword in 64-bit
 * mode).  80186+; operand size defaults to 64 bits in long mode.
 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        case IEMMODE_64BIT:
        {
            /* The immediate is a dword, sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2687
2688
/**
 * @opcode 0x69
 *
 * IMUL Gv,Ev,Iz - three-operand signed multiply with a full-size immediate.
 * 80186+.  SF, ZF, AF and PF are undefined after this instruction.  The
 * multiply itself is done by the CPU-behavior-selected assembly helper on a
 * local copy of the destination, which is then written back.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand - note the immediate follows the modr/m
                   bytes, hence the 2-byte hint to the effective address
                   calculation. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand - 4 immediate bytes follow the modr/m bytes. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand - the immediate is a sign-extended dword. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
                IEM_MC_ASSIGN_U32_SX_U64(u64Src, u32Imm); /* parameter count for the threaded function for this block. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2854
2855
/**
 * @opcode 0x6a
 *
 * PUSH Ib - push a sign-extended byte immediate at the effective operand
 * size.  80186+; operand size defaults to 64 bits in long mode.
 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The signed i8Imm is implicitly sign-extended to the push width. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(i8Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(i8Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(i8Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2890
2891
/**
 * @opcode 0x6b
 *
 * IMUL Gv,Ev,Ib - three-operand signed multiply with a sign-extended byte
 * immediate.  80186+.  SF, ZF, AF and PF are undefined after this
 * instruction.  Mirrors the 0x69 handler except for the immediate size.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand - immediate byte sign-extended to 16 bits. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand - one immediate byte follows the modr/m bytes. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand - immediate byte sign-extended to 32 bits. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand - immediate byte sign-extended to 64 bits. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the */
                IEM_MC_ASSIGN_U8_SX_U64(u64Src, u8Imm); /* parameter count for the threaded function for this block. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3057
3058
/**
 * @opcode 0x6c
 *
 * INS/REP INS Yb,DX - input byte(s) from port DX to ES:[rDI].  80186+.
 * Fully deferred to C implementations selected by address size and by
 * whether a REP/REPNE prefix is present.
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        /* NOTE(review): the 'false' argument presumably is fIoChecked (I/O
           permission not yet verified) - confirm against the CImpl signature. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3089
3090
/**
 * @opcode 0x6d
 *
 * INS Yv,DX - dispatches on both effective operand size and effective address
 * mode.  The 64-bit operand size shares the 32-bit workers (no 64-bit port
 * I/O).  The trailing break statements are not reached since every inner
 * switch case returns.
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186(); /* INS first appeared on the 80186. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REPNZ and REPZ are treated identically for INS. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* shares the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* shares the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3153
3154
/**
 * @opcode 0x6e
 *
 * OUTS DX,Yb - dispatches to a C implementation worker selected by the
 * effective address mode; the effective segment (overridable for the source
 * string) is passed along to the worker.
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186(); /* OUTS first appeared on the 80186. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REPNZ and REPZ are treated identically for OUTS. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3185
3186
/**
 * @opcode 0x6f
 *
 * OUTS DX,Yv - dispatches on both effective operand size and effective
 * address mode, passing the effective segment to the worker.  The 64-bit
 * operand size shares the 32-bit workers (no 64-bit port I/O).  The trailing
 * break statements are not reached since every inner switch case returns.
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186(); /* OUTS first appeared on the 80186. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REPNZ and REPZ are treated identically for OUTS. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* shares the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* shares the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3249
3250
/**
 * @opcode 0x70
 * Jump short if overflow (OF=1).
 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}


/**
 * @opcode 0x71
 * Jump short if not overflow (OF=0); branches are the inverse of 0x70.
 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}

/**
 * @opcode 0x72
 * Jump short if carry (CF=1).
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}


/**
 * @opcode 0x73
 * Jump short if not carry (CF=0).
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}


/**
 * @opcode 0x74
 * Jump short if equal/zero (ZF=1).
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}


/**
 * @opcode 0x75
 * Jump short if not equal/not zero (ZF=0).
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}


/**
 * @opcode 0x76
 * Jump short if below or equal (CF=1 or ZF=1).
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}


/**
 * @opcode 0x77
 * Jump short if above (CF=0 and ZF=0).
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}


/**
 * @opcode 0x78
 * Jump short if sign (SF=1).
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}


/**
 * @opcode 0x79
 * Jump short if not sign (SF=0).
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}


/**
 * @opcode 0x7a
 * Jump short if parity (PF=1).
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}


/**
 * @opcode 0x7b
 * Jump short if not parity (PF=0).
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}


/**
 * @opcode 0x7c
 * Jump short if less (SF != OF).
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}


/**
 * @opcode 0x7d
 * Jump short if greater or equal (SF == OF).
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}


/**
 * @opcode 0x7e
 * Jump short if less or equal (ZF=1 or SF != OF).
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}


/**
 * @opcode 0x7f
 * Jump short if greater (ZF=0 and SF == OF).
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3568
3569
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Eb_Ib_80.
 *
 * Emits the register-target path and the unlocked memory-target path, then
 * deliberately ends inside an open 'else' block (the LOCK-prefixed memory
 * case).  The caller must close it with either
 * IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() or IEMOP_BODY_BINARY_Eb_Ib_LOCKED().
 *
 * In the memory path the immediate byte is fetched after the effective
 * address calculation, since the ModRM displacement precedes the immediate
 * in the instruction encoding.
 *
 * @param a_fnNormalU8  Assembly worker for the unlocked byte operation.
 * @param a_fRW         IEM_ACCESS_DATA_RW for read-modify-write operators,
 *                      IEM_ACCESS_DATA_R for CMP.
 */
#define IEMOP_BODY_BINARY_Eb_Ib(a_fnNormalU8, a_fRW) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_BEGIN(3, 0); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm, 1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 2); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP(pu8Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, a_fRW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
3619
/**
 * Closes IEMOP_BODY_BINARY_Eb_Ib for operators that do not accept a LOCK
 * prefix with a memory operand (CMP): rejects the lock prefix via
 * IEMOP_RAISE_INVALID_LOCK_PREFIX().
 */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
        } \
    } \
    (void)0

/**
 * Closes IEMOP_BODY_BINARY_Eb_Ib with the LOCK-prefixed memory-target path,
 * calling the locked assembly worker on an RW-mapped byte.
 *
 * @param a_fnLockedU8  Assembly worker for the locked byte operation.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 2); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
3649
3650
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /0
 * ADD Eb,Ib - read-modify-write, LOCKable with a memory operand.
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_add_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
}


/**
 * @opmaps grp1_80,grp1_83
 * @opcode /1
 * OR Eb,Ib - read-modify-write, LOCKable with a memory operand.
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_or_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
}


/**
 * @opmaps grp1_80,grp1_83
 * @opcode /2
 * ADC Eb,Ib - read-modify-write, LOCKable with a memory operand.
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_adc_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
}


/**
 * @opmaps grp1_80,grp1_83
 * @opcode /3
 * SBB Eb,Ib - read-modify-write, LOCKable with a memory operand.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_sbb_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
}


/**
 * @opmaps grp1_80,grp1_83
 * @opcode /4
 * AND Eb,Ib - read-modify-write, LOCKable with a memory operand.
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_and_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
}


/**
 * @opmaps grp1_80,grp1_83
 * @opcode /5
 * SUB Eb,Ib - read-modify-write, LOCKable with a memory operand.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_sub_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
}


/**
 * @opmaps grp1_80,grp1_83
 * @opcode /6
 * XOR Eb,Ib - read-modify-write, LOCKable with a memory operand.
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_xor_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
}


/**
 * @opmaps grp1_80,grp1_83
 * @opcode /7
 * CMP Eb,Ib - only reads the destination (IEM_ACCESS_DATA_R); the LOCK
 * prefix is rejected via the NO_LOCK body.
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(iemAImpl_cmp_u8, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
}
3745
3746
/**
 * @opcode 0x80
 *
 * Group 1 dispatcher for the byte-immediate forms: routes to the worker
 * selected by the ModRM.reg field.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3766
3767
/**
 * Body for a group 1 binary operator (opcode 0x81, Ev,Iz forms).
 *
 * Emits the register-target paths for all three effective operand sizes and
 * the unlocked memory-target paths, then deliberately ends inside an open
 * 'else' block (the LOCK-prefixed memory case).  The caller must close it
 * with either IEMOP_BODY_BINARY_Ev_Iz_NO_LOCK() or
 * IEMOP_BODY_BINARY_Ev_Iz_LOCKED().
 *
 * Notes grounded in the code below:
 *  - The 64-bit immediate is a sign-extended 32-bit value (Iz,
 *    IEM_OPCODE_GET_NEXT_S32_SX_U64).
 *  - In the memory paths the immediate is fetched after the effective
 *    address calculation (displacement precedes the immediate).
 *  - The 32-bit register path clears the high half of the 64-bit register
 *    only for writeback operators (a_fRW == IEM_ACCESS_DATA_RW), not CMP.
 *  - NOTE(review): the 64-bit register path uses
 *    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX() while the 16/32-bit paths use
 *    IEMOP_HLP_DONE_DECODING() - confirm this asymmetry is intentional.
 *
 * @param a_fnNormalU16  Unlocked 16-bit assembly worker.
 * @param a_fnNormalU32  Unlocked 32-bit assembly worker.
 * @param a_fnNormalU64  Unlocked 64-bit assembly worker.
 * @param a_fRW          IEM_ACCESS_DATA_RW or IEM_ACCESS_DATA_R (CMP).
 */
#define IEMOP_BODY_BINARY_Ev_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEMOP_HLP_DONE_DECODING(); \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEMOP_HLP_DONE_DECODING(); \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                if (a_fRW == IEM_ACCESS_DATA_RW) \
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,            0); \
                    IEM_MC_ARG(uint16_t,        u16Src,             1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,            0); \
                    IEM_MC_ARG(uint32_t,        u32Src,             1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,            0); \
                    IEM_MC_ARG(uint64_t,        u64Src,             1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
3918
/**
 * Closes IEMOP_BODY_BINARY_Ev_Iz for operators that do not accept a LOCK
 * prefix with a memory operand (CMP): rejects the lock prefix via
 * IEMOP_RAISE_INVALID_LOCK_PREFIX().
 */
#define IEMOP_BODY_BINARY_Ev_Iz_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
        } \
    } \
    (void)0

/**
 * Closes IEMOP_BODY_BINARY_Ev_Iz with the LOCK-prefixed memory-target paths
 * for all three effective operand sizes, calling the locked assembly workers
 * on RW-mapped memory.
 *
 * @param a_fnLockedU16  Locked 16-bit assembly worker.
 * @param a_fnLockedU32  Locked 32-bit assembly worker.
 * @param a_fnLockedU64  Locked 64-bit assembly worker.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,            0); \
                    IEM_MC_ARG(uint16_t,        u16Src,             1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,            0); \
                    IEM_MC_ARG(uint32_t,        u32Src,             1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,            0); \
                    IEM_MC_ARG(uint64_t,        u64Src,             1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4003
4004
/**
 * @opmaps grp1_81
 * @opcode /0
 * ADD Ev,Iz - read-modify-write, LOCKable with a memory operand.
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}


/**
 * @opmaps grp1_81
 * @opcode /1
 * OR Ev,Iz - read-modify-write, LOCKable with a memory operand.
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}


/**
 * @opmaps grp1_81
 * @opcode /2
 * ADC Ev,Iz - read-modify-write, LOCKable with a memory operand.
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}


/**
 * @opmaps grp1_81
 * @opcode /3
 * SBB Ev,Iz - read-modify-write, LOCKable with a memory operand.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}


/**
 * @opmaps grp1_81
 * @opcode /4
 * AND Ev,Iz - read-modify-write, LOCKable with a memory operand.
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}


/**
 * @opmaps grp1_81
 * @opcode /5
 * SUB Ev,Iz - read-modify-write, LOCKable with a memory operand.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}


/**
 * @opmaps grp1_81
 * @opcode /6
 * XOR Ev,Iz - read-modify-write, LOCKable with a memory operand.
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}


/**
 * @opmaps grp1_81
 * @opcode /7
 * CMP Ev,Iz - only reads the destination (IEM_ACCESS_DATA_R); the LOCK
 * prefix is rejected via the NO_LOCK body.
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_cmp_u16,        iemAImpl_cmp_u32,        iemAImpl_cmp_u64,        IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_Ev_Iz_NO_LOCK();
}
4099
4100
4101/**
4102 * @opcode 0x81
4103 */
4104FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
4105{
4106 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4107 switch (IEM_GET_MODRM_REG_8(bRm))
4108 {
4109 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
4110 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz, bRm);
4111 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
4112 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
4113 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
4114 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
4115 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
4116 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
4117 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4118 }
4119}
4120
4121
4122/**
4123 * @opcode 0x82
4124 * @opmnemonic grp1_82
4125 * @opgroup og_groups
4126 */
4127FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
4128{
4129 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
4130 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
4131}
4132
4133
4134/**
4135 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4136 * iemOp_Grp1_Ev_Ib.
4137 */
4138#define IEMOP_BODY_BINARY_Ev_Ib(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
4139 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4140 { \
4141 /* \
4142 * Register target \
4143 */ \
4144 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4145 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4146 switch (pVCpu->iem.s.enmEffOpSize) \
4147 { \
4148 case IEMMODE_16BIT: \
4149 { \
4150 IEM_MC_BEGIN(3, 0); \
4151 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4152 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
4153 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4154 \
4155 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4156 IEM_MC_REF_EFLAGS(pEFlags); \
4157 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4158 \
4159 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4160 IEM_MC_END(); \
4161 break; \
4162 } \
4163 \
4164 case IEMMODE_32BIT: \
4165 { \
4166 IEM_MC_BEGIN(3, 0); \
4167 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4168 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
4169 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4170 \
4171 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4172 IEM_MC_REF_EFLAGS(pEFlags); \
4173 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4174 if ((a_fRW) != IEM_ACCESS_DATA_R) \
4175 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
4176 \
4177 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4178 IEM_MC_END(); \
4179 break; \
4180 } \
4181 \
4182 case IEMMODE_64BIT: \
4183 { \
4184 IEM_MC_BEGIN(3, 0); \
4185 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4186 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
4187 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4188 \
4189 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4190 IEM_MC_REF_EFLAGS(pEFlags); \
4191 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4192 \
4193 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4194 IEM_MC_END(); \
4195 break; \
4196 } \
4197 \
4198 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4199 } \
4200 } \
4201 else \
4202 { \
4203 /* \
4204 * Memory target. \
4205 */ \
4206 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
4207 { \
4208 switch (pVCpu->iem.s.enmEffOpSize) \
4209 { \
4210 case IEMMODE_16BIT: \
4211 { \
4212 IEM_MC_BEGIN(3, 2); \
4213 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4214 IEM_MC_ARG(uint16_t, u16Src, 1); \
4215 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4216 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4217 \
4218 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4219 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4220 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
4221 IEMOP_HLP_DONE_DECODING(); \
4222 IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
4223 IEM_MC_FETCH_EFLAGS(EFlags); \
4224 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4225 \
4226 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
4227 IEM_MC_COMMIT_EFLAGS(EFlags); \
4228 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4229 IEM_MC_END(); \
4230 break; \
4231 } \
4232 \
4233 case IEMMODE_32BIT: \
4234 { \
4235 IEM_MC_BEGIN(3, 2); \
4236 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4237 IEM_MC_ARG(uint32_t, u32Src, 1); \
4238 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4239 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4240 \
4241 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4242 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4243 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
4244 IEMOP_HLP_DONE_DECODING(); \
4245 IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
4246 IEM_MC_FETCH_EFLAGS(EFlags); \
4247 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4248 \
4249 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
4250 IEM_MC_COMMIT_EFLAGS(EFlags); \
4251 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4252 IEM_MC_END(); \
4253 break; \
4254 } \
4255 \
4256 case IEMMODE_64BIT: \
4257 { \
4258 IEM_MC_BEGIN(3, 2); \
4259 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4260 IEM_MC_ARG(uint64_t, u64Src, 1); \
4261 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4262 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4263 \
4264 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4265 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4266 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
4267 IEMOP_HLP_DONE_DECODING(); \
4268 IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
4269 IEM_MC_FETCH_EFLAGS(EFlags); \
4270 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4271 \
4272 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
4273 IEM_MC_COMMIT_EFLAGS(EFlags); \
4274 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4275 IEM_MC_END(); \
4276 break; \
4277 } \
4278 \
4279 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4280 } \
4281 } \
4282 else \
4283 { \
4284 (void)0
4285
/**
 * Closes the IEMOP_BODY_BINARY_Ev_Ib expansion for instructions that do not
 * allow a LOCK prefix (e.g. CMP): the locked memory-target branch raises an
 * invalid-lock-prefix exception after finishing decoding.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
        } \
    } \
    (void)0
4292
/**
 * Closes the IEMOP_BODY_BINARY_Ev_Ib expansion with the LOCK-prefixed
 * memory-target path, mapping the destination read-write and calling the
 * interlocked worker for the effective operand size.
 *
 * @param a_fnLockedU16 Interlocked 16-bit worker function.
 * @param a_fnLockedU32 Interlocked 32-bit worker function.
 * @param a_fnLockedU64 Interlocked 64-bit worker function.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4370
4371/**
4372 * @opmaps grp1_83
4373 * @opcode /0
4374 */
4375FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
4376{
4377 IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
4378 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, IEM_ACCESS_DATA_RW);
4379 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
4380}
4381
4382
4383/**
4384 * @opmaps grp1_83
4385 * @opcode /1
4386 */
4387FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
4388{
4389 IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
4390 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, IEM_ACCESS_DATA_RW);
4391 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
4392}
4393
4394
4395/**
4396 * @opmaps grp1_83
4397 * @opcode /2
4398 */
4399FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
4400{
4401 IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
4402 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, IEM_ACCESS_DATA_RW);
4403 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
4404}
4405
4406
4407/**
4408 * @opmaps grp1_83
4409 * @opcode /3
4410 */
4411FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
4412{
4413 IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
4414 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, IEM_ACCESS_DATA_RW);
4415 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
4416}
4417
4418
4419/**
4420 * @opmaps grp1_83
4421 * @opcode /4
4422 */
4423FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
4424{
4425 IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
4426 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, IEM_ACCESS_DATA_RW);
4427 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
4428}
4429
4430
4431/**
4432 * @opmaps grp1_83
4433 * @opcode /5
4434 */
4435FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
4436{
4437 IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
4438 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, IEM_ACCESS_DATA_RW);
4439 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
4440}
4441
4442
4443/**
4444 * @opmaps grp1_83
4445 * @opcode /6
4446 */
4447FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
4448{
4449 IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
4450 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, IEM_ACCESS_DATA_RW);
4451 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
4452}
4453
4454
4455/**
4456 * @opmaps grp1_83
4457 * @opcode /7
4458 */
4459FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
4460{
4461 IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
4462 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, IEM_ACCESS_DATA_R);
4463 IEMOP_BODY_BINARY_Ev_Ib_NO_LOCK();
4464}
4465
4466
4467/**
4468 * @opcode 0x83
4469 */
4470FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
4471{
4472 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
4473 to the 386 even if absent in the intel reference manuals and some
4474 3rd party opcode listings. */
4475 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4476 switch (IEM_GET_MODRM_REG_8(bRm))
4477 {
4478 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
4479 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib, bRm);
4480 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
4481 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
4482 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
4483 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
4484 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
4485 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
4486 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4487 }
4488}
4489
4490
4491/**
4492 * @opcode 0x84
4493 */
4494FNIEMOP_DEF(iemOp_test_Eb_Gb)
4495{
4496 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
4497 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
4498 IEMOP_BODY_BINARY_rm_r8(iemAImpl_test_u8, IEM_ACCESS_DATA_R);
4499 IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
4500}
4501
4502
4503/**
4504 * @opcode 0x85
4505 */
4506FNIEMOP_DEF(iemOp_test_Ev_Gv)
4507{
4508 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
4509 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
4510 IEMOP_BODY_BINARY_rm_rv(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, IEM_ACCESS_DATA_R);
4511 IEMOP_BODY_BINARY_rm_rv_NO_LOCK();
4512}
4513
4514
4515/**
4516 * @opcode 0x86
4517 */
4518FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
4519{
4520 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4521 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
4522
4523 /*
4524 * If rm is denoting a register, no more instruction bytes.
4525 */
4526 if (IEM_IS_MODRM_REG_MODE(bRm))
4527 {
4528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4529
4530 IEM_MC_BEGIN(0, 2);
4531 IEM_MC_LOCAL(uint8_t, uTmp1);
4532 IEM_MC_LOCAL(uint8_t, uTmp2);
4533
4534 IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
4535 IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
4536 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
4537 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
4538
4539 IEM_MC_ADVANCE_RIP_AND_FINISH();
4540 IEM_MC_END();
4541 }
4542 else
4543 {
4544 /*
4545 * We're accessing memory.
4546 */
4547/** @todo the register must be committed separately! */
4548 IEM_MC_BEGIN(2, 2);
4549 IEM_MC_ARG(uint8_t *, pu8Mem, 0);
4550 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
4551 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4552
4553 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4554 IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
4555 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
4556 if (!pVCpu->iem.s.fDisregardLock)
4557 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
4558 else
4559 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
4560 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);
4561
4562 IEM_MC_ADVANCE_RIP_AND_FINISH();
4563 IEM_MC_END();
4564 }
4565}
4566
4567
4568/**
4569 * @opcode 0x87
4570 */
4571FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
4572{
4573 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
4574 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4575
4576 /*
4577 * If rm is denoting a register, no more instruction bytes.
4578 */
4579 if (IEM_IS_MODRM_REG_MODE(bRm))
4580 {
4581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4582
4583 switch (pVCpu->iem.s.enmEffOpSize)
4584 {
4585 case IEMMODE_16BIT:
4586 IEM_MC_BEGIN(0, 2);
4587 IEM_MC_LOCAL(uint16_t, uTmp1);
4588 IEM_MC_LOCAL(uint16_t, uTmp2);
4589
4590 IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
4591 IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
4592 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
4593 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
4594
4595 IEM_MC_ADVANCE_RIP_AND_FINISH();
4596 IEM_MC_END();
4597 break;
4598
4599 case IEMMODE_32BIT:
4600 IEM_MC_BEGIN(0, 2);
4601 IEM_MC_LOCAL(uint32_t, uTmp1);
4602 IEM_MC_LOCAL(uint32_t, uTmp2);
4603
4604 IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
4605 IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
4606 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
4607 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
4608
4609 IEM_MC_ADVANCE_RIP_AND_FINISH();
4610 IEM_MC_END();
4611 break;
4612
4613 case IEMMODE_64BIT:
4614 IEM_MC_BEGIN(0, 2);
4615 IEM_MC_LOCAL(uint64_t, uTmp1);
4616 IEM_MC_LOCAL(uint64_t, uTmp2);
4617
4618 IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
4619 IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
4620 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
4621 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
4622
4623 IEM_MC_ADVANCE_RIP_AND_FINISH();
4624 IEM_MC_END();
4625 break;
4626
4627 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4628 }
4629 }
4630 else
4631 {
4632 /*
4633 * We're accessing memory.
4634 */
4635 switch (pVCpu->iem.s.enmEffOpSize)
4636 {
4637/** @todo the register must be committed separately! */
4638 case IEMMODE_16BIT:
4639 IEM_MC_BEGIN(2, 2);
4640 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
4641 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
4642 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4643
4644 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4645 IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
4646 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
4647 if (!pVCpu->iem.s.fDisregardLock)
4648 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
4649 else
4650 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
4651 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
4652
4653 IEM_MC_ADVANCE_RIP_AND_FINISH();
4654 IEM_MC_END();
4655 break;
4656
4657 case IEMMODE_32BIT:
4658 IEM_MC_BEGIN(2, 2);
4659 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
4660 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
4661 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4662
4663 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4664 IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
4665 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
4666 if (!pVCpu->iem.s.fDisregardLock)
4667 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
4668 else
4669 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
4670 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
4671
4672 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
4673 IEM_MC_ADVANCE_RIP_AND_FINISH();
4674 IEM_MC_END();
4675 break;
4676
4677 case IEMMODE_64BIT:
4678 IEM_MC_BEGIN(2, 2);
4679 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
4680 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
4681 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4682
4683 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4684 IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
4685 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
4686 if (!pVCpu->iem.s.fDisregardLock)
4687 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
4688 else
4689 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
4690 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
4691
4692 IEM_MC_ADVANCE_RIP_AND_FINISH();
4693 IEM_MC_END();
4694 break;
4695
4696 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4697 }
4698 }
4699}
4700
4701
4702/**
4703 * @opcode 0x88
4704 */
4705FNIEMOP_DEF(iemOp_mov_Eb_Gb)
4706{
4707 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
4708
4709 uint8_t bRm;
4710 IEM_OPCODE_GET_NEXT_U8(&bRm);
4711
4712 /*
4713 * If rm is denoting a register, no more instruction bytes.
4714 */
4715 if (IEM_IS_MODRM_REG_MODE(bRm))
4716 {
4717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4718 IEM_MC_BEGIN(0, 1);
4719 IEM_MC_LOCAL(uint8_t, u8Value);
4720 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4721 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
4722 IEM_MC_ADVANCE_RIP_AND_FINISH();
4723 IEM_MC_END();
4724 }
4725 else
4726 {
4727 /*
4728 * We're writing a register to memory.
4729 */
4730 IEM_MC_BEGIN(0, 2);
4731 IEM_MC_LOCAL(uint8_t, u8Value);
4732 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4733 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4735 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4736 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
4737 IEM_MC_ADVANCE_RIP_AND_FINISH();
4738 IEM_MC_END();
4739 }
4740}
4741
4742
4743/**
4744 * @opcode 0x89
4745 */
4746FNIEMOP_DEF(iemOp_mov_Ev_Gv)
4747{
4748 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
4749
4750 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4751
4752 /*
4753 * If rm is denoting a register, no more instruction bytes.
4754 */
4755 if (IEM_IS_MODRM_REG_MODE(bRm))
4756 {
4757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4758 switch (pVCpu->iem.s.enmEffOpSize)
4759 {
4760 case IEMMODE_16BIT:
4761 IEM_MC_BEGIN(0, 1);
4762 IEM_MC_LOCAL(uint16_t, u16Value);
4763 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4764 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
4765 IEM_MC_ADVANCE_RIP_AND_FINISH();
4766 IEM_MC_END();
4767 break;
4768
4769 case IEMMODE_32BIT:
4770 IEM_MC_BEGIN(0, 1);
4771 IEM_MC_LOCAL(uint32_t, u32Value);
4772 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4773 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
4774 IEM_MC_ADVANCE_RIP_AND_FINISH();
4775 IEM_MC_END();
4776 break;
4777
4778 case IEMMODE_64BIT:
4779 IEM_MC_BEGIN(0, 1);
4780 IEM_MC_LOCAL(uint64_t, u64Value);
4781 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4782 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
4783 IEM_MC_ADVANCE_RIP_AND_FINISH();
4784 IEM_MC_END();
4785 break;
4786
4787 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4788 }
4789 }
4790 else
4791 {
4792 /*
4793 * We're writing a register to memory.
4794 */
4795 switch (pVCpu->iem.s.enmEffOpSize)
4796 {
4797 case IEMMODE_16BIT:
4798 IEM_MC_BEGIN(0, 2);
4799 IEM_MC_LOCAL(uint16_t, u16Value);
4800 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4801 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4803 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4804 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
4805 IEM_MC_ADVANCE_RIP_AND_FINISH();
4806 IEM_MC_END();
4807 break;
4808
4809 case IEMMODE_32BIT:
4810 IEM_MC_BEGIN(0, 2);
4811 IEM_MC_LOCAL(uint32_t, u32Value);
4812 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4813 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4815 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4816 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
4817 IEM_MC_ADVANCE_RIP_AND_FINISH();
4818 IEM_MC_END();
4819 break;
4820
4821 case IEMMODE_64BIT:
4822 IEM_MC_BEGIN(0, 2);
4823 IEM_MC_LOCAL(uint64_t, u64Value);
4824 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4827 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4828 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
4829 IEM_MC_ADVANCE_RIP_AND_FINISH();
4830 IEM_MC_END();
4831 break;
4832
4833 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4834 }
4835 }
4836}
4837
4838
4839/**
4840 * @opcode 0x8a
4841 */
4842FNIEMOP_DEF(iemOp_mov_Gb_Eb)
4843{
4844 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
4845
4846 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4847
4848 /*
4849 * If rm is denoting a register, no more instruction bytes.
4850 */
4851 if (IEM_IS_MODRM_REG_MODE(bRm))
4852 {
4853 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4854 IEM_MC_BEGIN(0, 1);
4855 IEM_MC_LOCAL(uint8_t, u8Value);
4856 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
4857 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
4858 IEM_MC_ADVANCE_RIP_AND_FINISH();
4859 IEM_MC_END();
4860 }
4861 else
4862 {
4863 /*
4864 * We're loading a register from memory.
4865 */
4866 IEM_MC_BEGIN(0, 2);
4867 IEM_MC_LOCAL(uint8_t, u8Value);
4868 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4869 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4870 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4871 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4872 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
4873 IEM_MC_ADVANCE_RIP_AND_FINISH();
4874 IEM_MC_END();
4875 }
4876}
4877
4878
4879/**
4880 * @opcode 0x8b
4881 */
4882FNIEMOP_DEF(iemOp_mov_Gv_Ev)
4883{
4884 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
4885
4886 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4887
4888 /*
4889 * If rm is denoting a register, no more instruction bytes.
4890 */
4891 if (IEM_IS_MODRM_REG_MODE(bRm))
4892 {
4893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4894 switch (pVCpu->iem.s.enmEffOpSize)
4895 {
4896 case IEMMODE_16BIT:
4897 IEM_MC_BEGIN(0, 1);
4898 IEM_MC_LOCAL(uint16_t, u16Value);
4899 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
4900 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
4901 IEM_MC_ADVANCE_RIP_AND_FINISH();
4902 IEM_MC_END();
4903 break;
4904
4905 case IEMMODE_32BIT:
4906 IEM_MC_BEGIN(0, 1);
4907 IEM_MC_LOCAL(uint32_t, u32Value);
4908 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
4909 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
4910 IEM_MC_ADVANCE_RIP_AND_FINISH();
4911 IEM_MC_END();
4912 break;
4913
4914 case IEMMODE_64BIT:
4915 IEM_MC_BEGIN(0, 1);
4916 IEM_MC_LOCAL(uint64_t, u64Value);
4917 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
4918 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
4919 IEM_MC_ADVANCE_RIP_AND_FINISH();
4920 IEM_MC_END();
4921 break;
4922
4923 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4924 }
4925 }
4926 else
4927 {
4928 /*
4929 * We're loading a register from memory.
4930 */
4931 switch (pVCpu->iem.s.enmEffOpSize)
4932 {
4933 case IEMMODE_16BIT:
4934 IEM_MC_BEGIN(0, 2);
4935 IEM_MC_LOCAL(uint16_t, u16Value);
4936 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4937 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4939 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4940 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
4941 IEM_MC_ADVANCE_RIP_AND_FINISH();
4942 IEM_MC_END();
4943 break;
4944
4945 case IEMMODE_32BIT:
4946 IEM_MC_BEGIN(0, 2);
4947 IEM_MC_LOCAL(uint32_t, u32Value);
4948 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4949 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4951 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4952 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
4953 IEM_MC_ADVANCE_RIP_AND_FINISH();
4954 IEM_MC_END();
4955 break;
4956
4957 case IEMMODE_64BIT:
4958 IEM_MC_BEGIN(0, 2);
4959 IEM_MC_LOCAL(uint64_t, u64Value);
4960 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4961 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4963 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4964 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
4965 IEM_MC_ADVANCE_RIP_AND_FINISH();
4966 IEM_MC_END();
4967 break;
4968
4969 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4970 }
4971 }
4972}
4973
4974
4975/**
4976 * opcode 0x63
4977 * @todo Table fixme
4978 */
4979FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
4980{
4981 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
4982 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
4983 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
4984 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
4985 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
4986}
4987
4988
4989/**
4990 * @opcode 0x8c
4991 */
4992FNIEMOP_DEF(iemOp_mov_Ev_Sw)
4993{
4994 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
4995
4996 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4997
4998 /*
4999 * Check that the destination register exists. The REX.R prefix is ignored.
5000 */
5001 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5002 if ( iSegReg > X86_SREG_GS)
5003 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5004
5005 /*
5006 * If rm is denoting a register, no more instruction bytes.
5007 * In that case, the operand size is respected and the upper bits are
5008 * cleared (starting with some pentium).
5009 */
5010 if (IEM_IS_MODRM_REG_MODE(bRm))
5011 {
5012 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5013 switch (pVCpu->iem.s.enmEffOpSize)
5014 {
5015 case IEMMODE_16BIT:
5016 IEM_MC_BEGIN(0, 1);
5017 IEM_MC_LOCAL(uint16_t, u16Value);
5018 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
5019 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
5020 IEM_MC_ADVANCE_RIP_AND_FINISH();
5021 IEM_MC_END();
5022 break;
5023
5024 case IEMMODE_32BIT:
5025 IEM_MC_BEGIN(0, 1);
5026 IEM_MC_LOCAL(uint32_t, u32Value);
5027 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
5028 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
5029 IEM_MC_ADVANCE_RIP_AND_FINISH();
5030 IEM_MC_END();
5031 break;
5032
5033 case IEMMODE_64BIT:
5034 IEM_MC_BEGIN(0, 1);
5035 IEM_MC_LOCAL(uint64_t, u64Value);
5036 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
5037 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
5038 IEM_MC_ADVANCE_RIP_AND_FINISH();
5039 IEM_MC_END();
5040 break;
5041
5042 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5043 }
5044 }
5045 else
5046 {
5047 /*
5048 * We're saving the register to memory. The access is word sized
5049 * regardless of operand size prefixes.
5050 */
5051#if 0 /* not necessary */
5052 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5053#endif
5054 IEM_MC_BEGIN(0, 2);
5055 IEM_MC_LOCAL(uint16_t, u16Value);
5056 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5057 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5059 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
5060 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
5061 IEM_MC_ADVANCE_RIP_AND_FINISH();
5062 IEM_MC_END();
5063 }
5064}
5065
5066
5067
5068
5069/**
5070 * @opcode 0x8d
5071 */
5072FNIEMOP_DEF(iemOp_lea_Gv_M)
5073{
5074 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
5075 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5076 if (IEM_IS_MODRM_REG_MODE(bRm))
5077 return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */
5078
5079 switch (pVCpu->iem.s.enmEffOpSize)
5080 {
5081 case IEMMODE_16BIT:
5082 IEM_MC_BEGIN(0, 2);
5083 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5084 IEM_MC_LOCAL(uint16_t, u16Cast);
5085 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5087 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
5088 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
5089 IEM_MC_ADVANCE_RIP_AND_FINISH();
5090 IEM_MC_END();
5091 break;
5092
5093 case IEMMODE_32BIT:
5094 IEM_MC_BEGIN(0, 2);
5095 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5096 IEM_MC_LOCAL(uint32_t, u32Cast);
5097 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5099 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
5100 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
5101 IEM_MC_ADVANCE_RIP_AND_FINISH();
5102 IEM_MC_END();
5103 break;
5104
5105 case IEMMODE_64BIT:
5106 IEM_MC_BEGIN(0, 1);
5107 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5108 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5110 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
5111 IEM_MC_ADVANCE_RIP_AND_FINISH();
5112 IEM_MC_END();
5113 break;
5114
5115 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5116 }
5117}
5118
5119
5120/**
5121 * @opcode 0x8e
5122 */
5123FNIEMOP_DEF(iemOp_mov_Sw_Ev)
5124{
5125 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
5126
5127 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5128
5129 /*
5130 * The practical operand size is 16-bit.
5131 */
5132#if 0 /* not necessary */
5133 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5134#endif
5135
5136 /*
5137 * Check that the destination register exists and can be used with this
5138 * instruction. The REX.R prefix is ignored.
5139 */
5140 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5141 if ( iSegReg == X86_SREG_CS
5142 || iSegReg > X86_SREG_GS)
5143 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5144
5145 /*
5146 * If rm is denoting a register, no more instruction bytes.
5147 */
5148 if (IEM_IS_MODRM_REG_MODE(bRm))
5149 {
5150 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5151 IEM_MC_BEGIN(2, 0);
5152 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
5153 IEM_MC_ARG(uint16_t, u16Value, 1);
5154 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5155 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
5156 IEM_MC_END();
5157 }
5158 else
5159 {
5160 /*
5161 * We're loading the register from memory. The access is word sized
5162 * regardless of operand size prefixes.
5163 */
5164 IEM_MC_BEGIN(2, 1);
5165 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
5166 IEM_MC_ARG(uint16_t, u16Value, 1);
5167 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5168 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5170 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5171 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
5172 IEM_MC_END();
5173 }
5174}
5175
5176
/** Opcode 0x8f /0.
 *
 * POP Ev - pop a value off the stack into a general register or memory
 * operand.  The register form shares iemOpCommonPopGReg; the memory form is
 * implemented inline (not via IEM_MC) because RSP must be incremented before
 * the effective address is calculated.
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    /* The high byte of the third argument is the RSP displacement to apply
       during the address calculation, i.e. the operand size being popped. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    /* Pop into a temporary RSP copy first so nothing is committed on failure. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Commit the new RSP only after both the pop and the store succeeded. */
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
5270
5271
5272/**
5273 * @opcode 0x8f
5274 */
5275FNIEMOP_DEF(iemOp_Grp1A__xop)
5276{
5277 /*
5278 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
5279 * three byte VEX prefix, except that the mmmmm field cannot have the values
5280 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
5281 */
5282 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5283 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
5284 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
5285
5286 IEMOP_MNEMONIC(xop, "xop");
5287 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
5288 {
5289 /** @todo Test when exctly the XOP conformance checks kick in during
5290 * instruction decoding and fetching (using \#PF). */
5291 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
5292 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
5293 if ( ( pVCpu->iem.s.fPrefixes
5294 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
5295 == 0)
5296 {
5297 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
5298 if ((bXop2 & 0x80 /* XOP.W */) && pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
5299 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
5300 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
5301 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
5302 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
5303 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
5304 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
5305 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
5306
5307 /** @todo XOP: Just use new tables and decoders. */
5308 switch (bRm & 0x1f)
5309 {
5310 case 8: /* xop opcode map 8. */
5311 IEMOP_BITCH_ABOUT_STUB();
5312 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5313
5314 case 9: /* xop opcode map 9. */
5315 IEMOP_BITCH_ABOUT_STUB();
5316 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5317
5318 case 10: /* xop opcode map 10. */
5319 IEMOP_BITCH_ABOUT_STUB();
5320 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5321
5322 default:
5323 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
5324 return IEMOP_RAISE_INVALID_OPCODE();
5325 }
5326 }
5327 else
5328 Log(("XOP: Invalid prefix mix!\n"));
5329 }
5330 else
5331 Log(("XOP: XOP support disabled!\n"));
5332 return IEMOP_RAISE_INVALID_OPCODE();
5333}
5334
5335
5336/**
5337 * Common 'xchg reg,rAX' helper.
5338 */
5339FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
5340{
5341 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5342
5343 iReg |= pVCpu->iem.s.uRexB;
5344 switch (pVCpu->iem.s.enmEffOpSize)
5345 {
5346 case IEMMODE_16BIT:
5347 IEM_MC_BEGIN(0, 2);
5348 IEM_MC_LOCAL(uint16_t, u16Tmp1);
5349 IEM_MC_LOCAL(uint16_t, u16Tmp2);
5350 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
5351 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
5352 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
5353 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
5354 IEM_MC_ADVANCE_RIP_AND_FINISH();
5355 IEM_MC_END();
5356 break;
5357
5358 case IEMMODE_32BIT:
5359 IEM_MC_BEGIN(0, 2);
5360 IEM_MC_LOCAL(uint32_t, u32Tmp1);
5361 IEM_MC_LOCAL(uint32_t, u32Tmp2);
5362 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
5363 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
5364 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
5365 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
5366 IEM_MC_ADVANCE_RIP_AND_FINISH();
5367 IEM_MC_END();
5368 break;
5369
5370 case IEMMODE_64BIT:
5371 IEM_MC_BEGIN(0, 2);
5372 IEM_MC_LOCAL(uint64_t, u64Tmp1);
5373 IEM_MC_LOCAL(uint64_t, u64Tmp2);
5374 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
5375 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
5376 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
5377 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
5378 IEM_MC_ADVANCE_RIP_AND_FINISH();
5379 IEM_MC_END();
5380 break;
5381
5382 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5383 }
5384}
5385
5386
5387/**
5388 * @opcode 0x90
5389 */
5390FNIEMOP_DEF(iemOp_nop)
5391{
5392 /* R8/R8D and RAX/EAX can be exchanged. */
5393 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
5394 {
5395 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
5396 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
5397 }
5398
5399 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
5400 {
5401 IEMOP_MNEMONIC(pause, "pause");
5402#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
5403 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVmx)
5404 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmx_pause);
5405#endif
5406#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
5407 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSvm)
5408 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_svm_pause);
5409#endif
5410 }
5411 else
5412 IEMOP_MNEMONIC(nop, "nop");
5413 IEM_MC_BEGIN(0, 0);
5414 IEM_MC_ADVANCE_RIP_AND_FINISH();
5415 IEM_MC_END();
5416}
5417
5418
5419/**
5420 * @opcode 0x91
5421 */
5422FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
5423{
5424 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
5425 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
5426}
5427
5428
5429/**
5430 * @opcode 0x92
5431 */
5432FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
5433{
5434 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
5435 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
5436}
5437
5438
5439/**
5440 * @opcode 0x93
5441 */
5442FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
5443{
5444 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
5445 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
5446}
5447
5448
5449/**
5450 * @opcode 0x94
5451 */
5452FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
5453{
5454 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
5455 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
5456}
5457
5458
5459/**
5460 * @opcode 0x95
5461 */
5462FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
5463{
5464 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
5465 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
5466}
5467
5468
5469/**
5470 * @opcode 0x96
5471 */
5472FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
5473{
5474 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
5475 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
5476}
5477
5478
5479/**
5480 * @opcode 0x97
5481 */
5482FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
5483{
5484 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
5485 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
5486}
5487
5488
5489/**
5490 * @opcode 0x98
5491 */
5492FNIEMOP_DEF(iemOp_cbw)
5493{
5494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5495 switch (pVCpu->iem.s.enmEffOpSize)
5496 {
5497 case IEMMODE_16BIT:
5498 IEMOP_MNEMONIC(cbw, "cbw");
5499 IEM_MC_BEGIN(0, 1);
5500 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
5501 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
5502 } IEM_MC_ELSE() {
5503 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
5504 } IEM_MC_ENDIF();
5505 IEM_MC_ADVANCE_RIP_AND_FINISH();
5506 IEM_MC_END();
5507 break;
5508
5509 case IEMMODE_32BIT:
5510 IEMOP_MNEMONIC(cwde, "cwde");
5511 IEM_MC_BEGIN(0, 1);
5512 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
5513 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
5514 } IEM_MC_ELSE() {
5515 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
5516 } IEM_MC_ENDIF();
5517 IEM_MC_ADVANCE_RIP_AND_FINISH();
5518 IEM_MC_END();
5519 break;
5520
5521 case IEMMODE_64BIT:
5522 IEMOP_MNEMONIC(cdqe, "cdqe");
5523 IEM_MC_BEGIN(0, 1);
5524 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
5525 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
5526 } IEM_MC_ELSE() {
5527 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
5528 } IEM_MC_ENDIF();
5529 IEM_MC_ADVANCE_RIP_AND_FINISH();
5530 IEM_MC_END();
5531 break;
5532
5533 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5534 }
5535}
5536
5537
5538/**
5539 * @opcode 0x99
5540 */
5541FNIEMOP_DEF(iemOp_cwd)
5542{
5543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5544 switch (pVCpu->iem.s.enmEffOpSize)
5545 {
5546 case IEMMODE_16BIT:
5547 IEMOP_MNEMONIC(cwd, "cwd");
5548 IEM_MC_BEGIN(0, 1);
5549 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
5550 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
5551 } IEM_MC_ELSE() {
5552 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
5553 } IEM_MC_ENDIF();
5554 IEM_MC_ADVANCE_RIP_AND_FINISH();
5555 IEM_MC_END();
5556 break;
5557
5558 case IEMMODE_32BIT:
5559 IEMOP_MNEMONIC(cdq, "cdq");
5560 IEM_MC_BEGIN(0, 1);
5561 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
5562 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
5563 } IEM_MC_ELSE() {
5564 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
5565 } IEM_MC_ENDIF();
5566 IEM_MC_ADVANCE_RIP_AND_FINISH();
5567 IEM_MC_END();
5568 break;
5569
5570 case IEMMODE_64BIT:
5571 IEMOP_MNEMONIC(cqo, "cqo");
5572 IEM_MC_BEGIN(0, 1);
5573 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
5574 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
5575 } IEM_MC_ELSE() {
5576 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
5577 } IEM_MC_ENDIF();
5578 IEM_MC_ADVANCE_RIP_AND_FINISH();
5579 IEM_MC_END();
5580 break;
5581
5582 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5583 }
5584}
5585
5586
5587/**
5588 * @opcode 0x9a
5589 */
5590FNIEMOP_DEF(iemOp_call_Ap)
5591{
5592 IEMOP_MNEMONIC(call_Ap, "call Ap");
5593 IEMOP_HLP_NO_64BIT();
5594
5595 /* Decode the far pointer address and pass it on to the far call C implementation. */
5596 uint32_t offSeg;
5597 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
5598 IEM_OPCODE_GET_NEXT_U32(&offSeg);
5599 else
5600 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
5601 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
5602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5603 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
5604}
5605
5606
/** Opcode 0x9b. (aka fwait)
 *
 * WAIT/FWAIT - checks for pending x87 FPU exceptions (and device-not-available
 * conditions) without executing any FPU operation.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
5619
5620
5621/**
5622 * @opcode 0x9c
5623 */
5624FNIEMOP_DEF(iemOp_pushf_Fv)
5625{
5626 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
5627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5628 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5629 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
5630}
5631
5632
5633/**
5634 * @opcode 0x9d
5635 */
5636FNIEMOP_DEF(iemOp_popf_Fv)
5637{
5638 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
5639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5640 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5641 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
5642}
5643
5644
5645/**
5646 * @opcode 0x9e
5647 */
5648FNIEMOP_DEF(iemOp_sahf)
5649{
5650 IEMOP_MNEMONIC(sahf, "sahf");
5651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5652 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
5653 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
5654 return IEMOP_RAISE_INVALID_OPCODE();
5655 IEM_MC_BEGIN(0, 2);
5656 IEM_MC_LOCAL(uint32_t, u32Flags);
5657 IEM_MC_LOCAL(uint32_t, EFlags);
5658 IEM_MC_FETCH_EFLAGS(EFlags);
5659 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
5660 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
5661 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
5662 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
5663 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
5664 IEM_MC_COMMIT_EFLAGS(EFlags);
5665 IEM_MC_ADVANCE_RIP_AND_FINISH();
5666 IEM_MC_END();
5667}
5668
5669
5670/**
5671 * @opcode 0x9f
5672 */
5673FNIEMOP_DEF(iemOp_lahf)
5674{
5675 IEMOP_MNEMONIC(lahf, "lahf");
5676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5677 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
5678 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
5679 return IEMOP_RAISE_INVALID_OPCODE();
5680 IEM_MC_BEGIN(0, 1);
5681 IEM_MC_LOCAL(uint8_t, u8Flags);
5682 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
5683 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
5684 IEM_MC_ADVANCE_RIP_AND_FINISH();
5685 IEM_MC_END();
5686}
5687
5688
5689/**
5690 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
5691 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend off lock
5692 * prefixes. Will return on failures.
5693 * @param a_GCPtrMemOff The variable to store the offset in.
5694 */
5695#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
5696 do \
5697 { \
5698 switch (pVCpu->iem.s.enmEffAddrMode) \
5699 { \
5700 case IEMMODE_16BIT: \
5701 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
5702 break; \
5703 case IEMMODE_32BIT: \
5704 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
5705 break; \
5706 case IEMMODE_64BIT: \
5707 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
5708 break; \
5709 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5710 } \
5711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5712 } while (0)
5713
5714/**
5715 * @opcode 0xa0
5716 */
5717FNIEMOP_DEF(iemOp_mov_AL_Ob)
5718{
5719 /*
5720 * Get the offset and fend off lock prefixes.
5721 */
5722 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
5723 RTGCPTR GCPtrMemOff;
5724 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
5725
5726 /*
5727 * Fetch AL.
5728 */
5729 IEM_MC_BEGIN(0,1);
5730 IEM_MC_LOCAL(uint8_t, u8Tmp);
5731 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
5732 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
5733 IEM_MC_ADVANCE_RIP_AND_FINISH();
5734 IEM_MC_END();
5735}
5736
5737
5738/**
5739 * @opcode 0xa1
5740 */
5741FNIEMOP_DEF(iemOp_mov_rAX_Ov)
5742{
5743 /*
5744 * Get the offset and fend off lock prefixes.
5745 */
5746 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
5747 RTGCPTR GCPtrMemOff;
5748 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
5749
5750 /*
5751 * Fetch rAX.
5752 */
5753 switch (pVCpu->iem.s.enmEffOpSize)
5754 {
5755 case IEMMODE_16BIT:
5756 IEM_MC_BEGIN(0,1);
5757 IEM_MC_LOCAL(uint16_t, u16Tmp);
5758 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
5759 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
5760 IEM_MC_ADVANCE_RIP_AND_FINISH();
5761 IEM_MC_END();
5762 break;
5763
5764 case IEMMODE_32BIT:
5765 IEM_MC_BEGIN(0,1);
5766 IEM_MC_LOCAL(uint32_t, u32Tmp);
5767 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
5768 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
5769 IEM_MC_ADVANCE_RIP_AND_FINISH();
5770 IEM_MC_END();
5771 break;
5772
5773 case IEMMODE_64BIT:
5774 IEM_MC_BEGIN(0,1);
5775 IEM_MC_LOCAL(uint64_t, u64Tmp);
5776 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
5777 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
5778 IEM_MC_ADVANCE_RIP_AND_FINISH();
5779 IEM_MC_END();
5780 break;
5781
5782 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5783 }
5784}
5785
5786
5787/**
5788 * @opcode 0xa2
5789 */
5790FNIEMOP_DEF(iemOp_mov_Ob_AL)
5791{
5792 /*
5793 * Get the offset and fend off lock prefixes.
5794 */
5795 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
5796 RTGCPTR GCPtrMemOff;
5797 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
5798
5799 /*
5800 * Store AL.
5801 */
5802 IEM_MC_BEGIN(0,1);
5803 IEM_MC_LOCAL(uint8_t, u8Tmp);
5804 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
5805 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
5806 IEM_MC_ADVANCE_RIP_AND_FINISH();
5807 IEM_MC_END();
5808}
5809
5810
5811/**
5812 * @opcode 0xa3
5813 */
5814FNIEMOP_DEF(iemOp_mov_Ov_rAX)
5815{
5816 /*
5817 * Get the offset and fend off lock prefixes.
5818 */
5819 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
5820 RTGCPTR GCPtrMemOff;
5821 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
5822
5823 /*
5824 * Store rAX.
5825 */
5826 switch (pVCpu->iem.s.enmEffOpSize)
5827 {
5828 case IEMMODE_16BIT:
5829 IEM_MC_BEGIN(0,1);
5830 IEM_MC_LOCAL(uint16_t, u16Tmp);
5831 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
5832 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
5833 IEM_MC_ADVANCE_RIP_AND_FINISH();
5834 IEM_MC_END();
5835 break;
5836
5837 case IEMMODE_32BIT:
5838 IEM_MC_BEGIN(0,1);
5839 IEM_MC_LOCAL(uint32_t, u32Tmp);
5840 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
5841 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
5842 IEM_MC_ADVANCE_RIP_AND_FINISH();
5843 IEM_MC_END();
5844 break;
5845
5846 case IEMMODE_64BIT:
5847 IEM_MC_BEGIN(0,1);
5848 IEM_MC_LOCAL(uint64_t, u64Tmp);
5849 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
5850 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
5851 IEM_MC_ADVANCE_RIP_AND_FINISH();
5852 IEM_MC_END();
5853 break;
5854
5855 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5856 }
5857}
5858
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Single-iteration MOVS: loads a ValBits-wide value from [effSeg:rSI], stores
 * it to [ES:rDI], then advances (or, with EFLAGS.DF set, retreats) rSI and
 * rDI by the operand size.  AddrBits selects the register width used for the
 * index registers.
 */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
5877
5878/**
5879 * @opcode 0xa4
5880 */
5881FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
5882{
5883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5884
5885 /*
5886 * Use the C implementation if a repeat prefix is encountered.
5887 */
5888 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5889 {
5890 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
5891 switch (pVCpu->iem.s.enmEffAddrMode)
5892 {
5893 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
5894 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
5895 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
5896 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5897 }
5898 }
5899 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
5900
5901 /*
5902 * Sharing case implementation with movs[wdq] below.
5903 */
5904 switch (pVCpu->iem.s.enmEffAddrMode)
5905 {
5906 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
5907 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
5908 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
5909 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5910 }
5911}
5912
5913
5914/**
5915 * @opcode 0xa5
5916 */
5917FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
5918{
5919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5920
5921 /*
5922 * Use the C implementation if a repeat prefix is encountered.
5923 */
5924 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5925 {
5926 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
5927 switch (pVCpu->iem.s.enmEffOpSize)
5928 {
5929 case IEMMODE_16BIT:
5930 switch (pVCpu->iem.s.enmEffAddrMode)
5931 {
5932 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
5933 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
5934 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
5935 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5936 }
5937 break;
5938 case IEMMODE_32BIT:
5939 switch (pVCpu->iem.s.enmEffAddrMode)
5940 {
5941 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
5942 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
5943 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
5944 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5945 }
5946 case IEMMODE_64BIT:
5947 switch (pVCpu->iem.s.enmEffAddrMode)
5948 {
5949 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
5950 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
5951 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
5952 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5953 }
5954 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5955 }
5956 }
5957 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
5958
5959 /*
5960 * Annoying double switch here.
5961 * Using ugly macro for implementing the cases, sharing it with movsb.
5962 */
5963 switch (pVCpu->iem.s.enmEffOpSize)
5964 {
5965 case IEMMODE_16BIT:
5966 switch (pVCpu->iem.s.enmEffAddrMode)
5967 {
5968 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
5969 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
5970 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
5971 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5972 }
5973 break;
5974
5975 case IEMMODE_32BIT:
5976 switch (pVCpu->iem.s.enmEffAddrMode)
5977 {
5978 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
5979 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
5980 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
5981 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5982 }
5983 break;
5984
5985 case IEMMODE_64BIT:
5986 switch (pVCpu->iem.s.enmEffAddrMode)
5987 {
5988 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5989 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
5990 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
5991 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5992 }
5993 break;
5994 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5995 }
5996}
5997
5998#undef IEM_MOVS_CASE
5999
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Single-iteration CMPS: fetches ValBits-wide operands from [effSeg:rSI] and
 * [ES:rDI], compares them via iemAImpl_cmp_uXX (updating EFLAGS only), then
 * advances (or, with EFLAGS.DF set, retreats) rSI and rDI by the operand
 * size.  AddrBits selects the register width used for the index registers.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6026
6027/**
6028 * @opcode 0xa6
6029 */
6030FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
6031{
6032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6033
6034 /*
6035 * Use the C implementation if a repeat prefix is encountered.
6036 */
6037 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6038 {
6039 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
6040 switch (pVCpu->iem.s.enmEffAddrMode)
6041 {
6042 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6043 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6044 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6045 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6046 }
6047 }
6048 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6049 {
6050 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
6051 switch (pVCpu->iem.s.enmEffAddrMode)
6052 {
6053 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6054 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6055 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6056 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6057 }
6058 }
6059 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
6060
6061 /*
6062 * Sharing case implementation with cmps[wdq] below.
6063 */
6064 switch (pVCpu->iem.s.enmEffAddrMode)
6065 {
6066 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
6067 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
6068 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
6069 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6070 }
6071}
6072
6073
6074/**
6075 * @opcode 0xa7
6076 */
6077FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
6078{
6079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6080
6081 /*
6082 * Use the C implementation if a repeat prefix is encountered.
6083 */
6084 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6085 {
6086 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
6087 switch (pVCpu->iem.s.enmEffOpSize)
6088 {
6089 case IEMMODE_16BIT:
6090 switch (pVCpu->iem.s.enmEffAddrMode)
6091 {
6092 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6093 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6094 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6095 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6096 }
6097 break;
6098 case IEMMODE_32BIT:
6099 switch (pVCpu->iem.s.enmEffAddrMode)
6100 {
6101 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6102 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6103 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6104 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6105 }
6106 case IEMMODE_64BIT:
6107 switch (pVCpu->iem.s.enmEffAddrMode)
6108 {
6109 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
6110 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6111 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6112 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6113 }
6114 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6115 }
6116 }
6117
6118 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6119 {
6120 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
6121 switch (pVCpu->iem.s.enmEffOpSize)
6122 {
6123 case IEMMODE_16BIT:
6124 switch (pVCpu->iem.s.enmEffAddrMode)
6125 {
6126 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6127 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6128 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6129 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6130 }
6131 break;
6132 case IEMMODE_32BIT:
6133 switch (pVCpu->iem.s.enmEffAddrMode)
6134 {
6135 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6136 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6137 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6138 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6139 }
6140 case IEMMODE_64BIT:
6141 switch (pVCpu->iem.s.enmEffAddrMode)
6142 {
6143 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
6144 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6145 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6146 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6147 }
6148 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6149 }
6150 }
6151
6152 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
6153
6154 /*
6155 * Annoying double switch here.
6156 * Using ugly macro for implementing the cases, sharing it with cmpsb.
6157 */
6158 switch (pVCpu->iem.s.enmEffOpSize)
6159 {
6160 case IEMMODE_16BIT:
6161 switch (pVCpu->iem.s.enmEffAddrMode)
6162 {
6163 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
6164 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
6165 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
6166 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6167 }
6168 break;
6169
6170 case IEMMODE_32BIT:
6171 switch (pVCpu->iem.s.enmEffAddrMode)
6172 {
6173 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
6174 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
6175 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
6176 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6177 }
6178 break;
6179
6180 case IEMMODE_64BIT:
6181 switch (pVCpu->iem.s.enmEffAddrMode)
6182 {
6183 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6184 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
6185 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
6186 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6187 }
6188 break;
6189 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6190 }
6191}
6192
6193#undef IEM_CMPS_CASE
6194
6195/**
6196 * @opcode 0xa8
6197 */
6198FNIEMOP_DEF(iemOp_test_AL_Ib)
6199{
6200 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
6201 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6202 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
6203}
6204
6205
6206/**
6207 * @opcode 0xa9
6208 */
6209FNIEMOP_DEF(iemOp_test_eAX_Iz)
6210{
6211 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
6212 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6213 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
6214}
6215
6216
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the microcode for one non-REP STOS iteration: stores the low
 * ValBits of rAX to ES:xDI (xDI zero-extended from AddrBits), then advances
 * or retreats xDI by ValBits/8 bytes according to EFLAGS.DF. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
6232
6233/**
6234 * @opcode 0xaa
6235 */
6236FNIEMOP_DEF(iemOp_stosb_Yb_AL)
6237{
6238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6239
6240 /*
6241 * Use the C implementation if a repeat prefix is encountered.
6242 */
6243 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6244 {
6245 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
6246 switch (pVCpu->iem.s.enmEffAddrMode)
6247 {
6248 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
6249 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
6250 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
6251 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6252 }
6253 }
6254 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
6255
6256 /*
6257 * Sharing case implementation with stos[wdq] below.
6258 */
6259 switch (pVCpu->iem.s.enmEffAddrMode)
6260 {
6261 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
6262 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
6263 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
6264 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6265 }
6266}
6267
6268
6269/**
6270 * @opcode 0xab
6271 */
6272FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
6273{
6274 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6275
6276 /*
6277 * Use the C implementation if a repeat prefix is encountered.
6278 */
6279 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6280 {
6281 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
6282 switch (pVCpu->iem.s.enmEffOpSize)
6283 {
6284 case IEMMODE_16BIT:
6285 switch (pVCpu->iem.s.enmEffAddrMode)
6286 {
6287 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
6288 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
6289 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
6290 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6291 }
6292 break;
6293 case IEMMODE_32BIT:
6294 switch (pVCpu->iem.s.enmEffAddrMode)
6295 {
6296 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
6297 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
6298 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
6299 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6300 }
6301 case IEMMODE_64BIT:
6302 switch (pVCpu->iem.s.enmEffAddrMode)
6303 {
6304 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
6305 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
6306 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
6307 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6308 }
6309 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6310 }
6311 }
6312 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
6313
6314 /*
6315 * Annoying double switch here.
6316 * Using ugly macro for implementing the cases, sharing it with stosb.
6317 */
6318 switch (pVCpu->iem.s.enmEffOpSize)
6319 {
6320 case IEMMODE_16BIT:
6321 switch (pVCpu->iem.s.enmEffAddrMode)
6322 {
6323 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
6324 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
6325 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
6326 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6327 }
6328 break;
6329
6330 case IEMMODE_32BIT:
6331 switch (pVCpu->iem.s.enmEffAddrMode)
6332 {
6333 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
6334 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
6335 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
6336 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6337 }
6338 break;
6339
6340 case IEMMODE_64BIT:
6341 switch (pVCpu->iem.s.enmEffAddrMode)
6342 {
6343 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6344 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
6345 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
6346 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6347 }
6348 break;
6349 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6350 }
6351}
6352
6353#undef IEM_STOS_CASE
6354
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the microcode for one non-REP LODS iteration: loads the low ValBits
 * of rAX from [iEffSeg:xSI] (xSI zero-extended from AddrBits), then advances
 * or retreats xSI by ValBits/8 bytes according to EFLAGS.DF. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
6370
6371/**
6372 * @opcode 0xac
6373 */
6374FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
6375{
6376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6377
6378 /*
6379 * Use the C implementation if a repeat prefix is encountered.
6380 */
6381 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6382 {
6383 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
6384 switch (pVCpu->iem.s.enmEffAddrMode)
6385 {
6386 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
6387 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
6388 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
6389 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6390 }
6391 }
6392 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
6393
6394 /*
6395 * Sharing case implementation with stos[wdq] below.
6396 */
6397 switch (pVCpu->iem.s.enmEffAddrMode)
6398 {
6399 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
6400 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
6401 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
6402 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6403 }
6404}
6405
6406
6407/**
6408 * @opcode 0xad
6409 */
6410FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
6411{
6412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6413
6414 /*
6415 * Use the C implementation if a repeat prefix is encountered.
6416 */
6417 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6418 {
6419 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
6420 switch (pVCpu->iem.s.enmEffOpSize)
6421 {
6422 case IEMMODE_16BIT:
6423 switch (pVCpu->iem.s.enmEffAddrMode)
6424 {
6425 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
6426 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
6427 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
6428 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6429 }
6430 break;
6431 case IEMMODE_32BIT:
6432 switch (pVCpu->iem.s.enmEffAddrMode)
6433 {
6434 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
6435 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
6436 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
6437 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6438 }
6439 case IEMMODE_64BIT:
6440 switch (pVCpu->iem.s.enmEffAddrMode)
6441 {
6442 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
6443 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
6444 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
6445 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6446 }
6447 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6448 }
6449 }
6450 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
6451
6452 /*
6453 * Annoying double switch here.
6454 * Using ugly macro for implementing the cases, sharing it with lodsb.
6455 */
6456 switch (pVCpu->iem.s.enmEffOpSize)
6457 {
6458 case IEMMODE_16BIT:
6459 switch (pVCpu->iem.s.enmEffAddrMode)
6460 {
6461 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
6462 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
6463 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
6464 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6465 }
6466 break;
6467
6468 case IEMMODE_32BIT:
6469 switch (pVCpu->iem.s.enmEffAddrMode)
6470 {
6471 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
6472 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
6473 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
6474 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6475 }
6476 break;
6477
6478 case IEMMODE_64BIT:
6479 switch (pVCpu->iem.s.enmEffAddrMode)
6480 {
6481 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6482 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
6483 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
6484 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6485 }
6486 break;
6487 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6488 }
6489}
6490
6491#undef IEM_LODS_CASE
6492
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the microcode for one non-REP SCAS iteration: compares the low
 * ValBits of rAX against the value at ES:xDI using the cmp worker (which
 * updates EFLAGS; rAX itself is not modified by cmp), then advances or
 * retreats xDI by ValBits/8 bytes according to EFLAGS.DF. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
6514
6515/**
6516 * @opcode 0xae
6517 */
6518FNIEMOP_DEF(iemOp_scasb_AL_Xb)
6519{
6520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6521
6522 /*
6523 * Use the C implementation if a repeat prefix is encountered.
6524 */
6525 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6526 {
6527 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
6528 switch (pVCpu->iem.s.enmEffAddrMode)
6529 {
6530 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
6531 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
6532 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
6533 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6534 }
6535 }
6536 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6537 {
6538 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
6539 switch (pVCpu->iem.s.enmEffAddrMode)
6540 {
6541 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
6542 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
6543 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
6544 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6545 }
6546 }
6547 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
6548
6549 /*
6550 * Sharing case implementation with stos[wdq] below.
6551 */
6552 switch (pVCpu->iem.s.enmEffAddrMode)
6553 {
6554 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
6555 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
6556 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
6557 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6558 }
6559}
6560
6561
6562/**
6563 * @opcode 0xaf
6564 */
6565FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
6566{
6567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6568
6569 /*
6570 * Use the C implementation if a repeat prefix is encountered.
6571 */
6572 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6573 {
6574 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
6575 switch (pVCpu->iem.s.enmEffOpSize)
6576 {
6577 case IEMMODE_16BIT:
6578 switch (pVCpu->iem.s.enmEffAddrMode)
6579 {
6580 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
6581 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
6582 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
6583 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6584 }
6585 break;
6586 case IEMMODE_32BIT:
6587 switch (pVCpu->iem.s.enmEffAddrMode)
6588 {
6589 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
6590 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
6591 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
6592 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6593 }
6594 case IEMMODE_64BIT:
6595 switch (pVCpu->iem.s.enmEffAddrMode)
6596 {
6597 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
6598 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
6599 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
6600 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6601 }
6602 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6603 }
6604 }
6605 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6606 {
6607 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
6608 switch (pVCpu->iem.s.enmEffOpSize)
6609 {
6610 case IEMMODE_16BIT:
6611 switch (pVCpu->iem.s.enmEffAddrMode)
6612 {
6613 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
6614 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
6615 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
6616 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6617 }
6618 break;
6619 case IEMMODE_32BIT:
6620 switch (pVCpu->iem.s.enmEffAddrMode)
6621 {
6622 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
6623 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
6624 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
6625 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6626 }
6627 case IEMMODE_64BIT:
6628 switch (pVCpu->iem.s.enmEffAddrMode)
6629 {
6630 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
6631 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
6632 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
6633 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6634 }
6635 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6636 }
6637 }
6638 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
6639
6640 /*
6641 * Annoying double switch here.
6642 * Using ugly macro for implementing the cases, sharing it with scasb.
6643 */
6644 switch (pVCpu->iem.s.enmEffOpSize)
6645 {
6646 case IEMMODE_16BIT:
6647 switch (pVCpu->iem.s.enmEffAddrMode)
6648 {
6649 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
6650 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
6651 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
6652 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6653 }
6654 break;
6655
6656 case IEMMODE_32BIT:
6657 switch (pVCpu->iem.s.enmEffAddrMode)
6658 {
6659 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
6660 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
6661 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
6662 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6663 }
6664 break;
6665
6666 case IEMMODE_64BIT:
6667 switch (pVCpu->iem.s.enmEffAddrMode)
6668 {
6669 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6670 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
6671 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
6672 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6673 }
6674 break;
6675 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6676 }
6677}
6678
6679#undef IEM_SCAS_CASE
6680
6681/**
6682 * Common 'mov r8, imm8' helper.
6683 */
6684FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
6685{
6686 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
6687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6688
6689 IEM_MC_BEGIN(0, 1);
6690 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
6691 IEM_MC_STORE_GREG_U8(iReg, u8Value);
6692 IEM_MC_ADVANCE_RIP_AND_FINISH();
6693 IEM_MC_END();
6694}
6695
6696
6697/**
6698 * @opcode 0xb0
6699 */
6700FNIEMOP_DEF(iemOp_mov_AL_Ib)
6701{
6702 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
6703 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
6704}
6705
6706
6707/**
6708 * @opcode 0xb1
6709 */
6710FNIEMOP_DEF(iemOp_CL_Ib)
6711{
6712 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
6713 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
6714}
6715
6716
6717/**
6718 * @opcode 0xb2
6719 */
6720FNIEMOP_DEF(iemOp_DL_Ib)
6721{
6722 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
6723 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
6724}
6725
6726
6727/**
6728 * @opcode 0xb3
6729 */
6730FNIEMOP_DEF(iemOp_BL_Ib)
6731{
6732 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
6733 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
6734}
6735
6736
6737/**
6738 * @opcode 0xb4
6739 */
6740FNIEMOP_DEF(iemOp_mov_AH_Ib)
6741{
6742 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
6743 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
6744}
6745
6746
6747/**
6748 * @opcode 0xb5
6749 */
6750FNIEMOP_DEF(iemOp_CH_Ib)
6751{
6752 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
6753 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
6754}
6755
6756
6757/**
6758 * @opcode 0xb6
6759 */
6760FNIEMOP_DEF(iemOp_DH_Ib)
6761{
6762 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
6763 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
6764}
6765
6766
6767/**
6768 * @opcode 0xb7
6769 */
6770FNIEMOP_DEF(iemOp_BH_Ib)
6771{
6772 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
6773 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
6774}
6775
6776
6777/**
6778 * Common 'mov regX,immX' helper.
6779 */
6780FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
6781{
6782 switch (pVCpu->iem.s.enmEffOpSize)
6783 {
6784 case IEMMODE_16BIT:
6785 {
6786 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6788
6789 IEM_MC_BEGIN(0, 1);
6790 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
6791 IEM_MC_STORE_GREG_U16(iReg, u16Value);
6792 IEM_MC_ADVANCE_RIP_AND_FINISH();
6793 IEM_MC_END();
6794 break;
6795 }
6796
6797 case IEMMODE_32BIT:
6798 {
6799 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
6800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6801
6802 IEM_MC_BEGIN(0, 1);
6803 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
6804 IEM_MC_STORE_GREG_U32(iReg, u32Value);
6805 IEM_MC_ADVANCE_RIP_AND_FINISH();
6806 IEM_MC_END();
6807 break;
6808 }
6809 case IEMMODE_64BIT:
6810 {
6811 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
6812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6813
6814 IEM_MC_BEGIN(0, 1);
6815 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
6816 IEM_MC_STORE_GREG_U64(iReg, u64Value);
6817 IEM_MC_ADVANCE_RIP_AND_FINISH();
6818 IEM_MC_END();
6819 break;
6820 }
6821 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6822 }
6823}
6824
6825
6826/**
6827 * @opcode 0xb8
6828 */
6829FNIEMOP_DEF(iemOp_eAX_Iv)
6830{
6831 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
6832 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
6833}
6834
6835
6836/**
6837 * @opcode 0xb9
6838 */
6839FNIEMOP_DEF(iemOp_eCX_Iv)
6840{
6841 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
6842 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
6843}
6844
6845
6846/**
6847 * @opcode 0xba
6848 */
6849FNIEMOP_DEF(iemOp_eDX_Iv)
6850{
6851 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
6852 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
6853}
6854
6855
6856/**
6857 * @opcode 0xbb
6858 */
6859FNIEMOP_DEF(iemOp_eBX_Iv)
6860{
6861 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
6862 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
6863}
6864
6865
6866/**
6867 * @opcode 0xbc
6868 */
6869FNIEMOP_DEF(iemOp_eSP_Iv)
6870{
6871 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
6872 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
6873}
6874
6875
6876/**
6877 * @opcode 0xbd
6878 */
6879FNIEMOP_DEF(iemOp_eBP_Iv)
6880{
6881 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
6882 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
6883}
6884
6885
6886/**
6887 * @opcode 0xbe
6888 */
6889FNIEMOP_DEF(iemOp_eSI_Iv)
6890{
6891 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
6892 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
6893}
6894
6895
6896/**
6897 * @opcode 0xbf
6898 */
6899FNIEMOP_DEF(iemOp_eDI_Iv)
6900{
6901 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
6902 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
6903}
6904
6905
6906/**
6907 * @opcode 0xc0
6908 */
6909FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
6910{
6911 IEMOP_HLP_MIN_186();
6912 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6913 PCIEMOPSHIFTSIZES pImpl;
6914 switch (IEM_GET_MODRM_REG_8(bRm))
6915 {
6916 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
6917 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
6918 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
6919 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
6920 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
6921 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
6922 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
6923 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6924 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
6925 }
6926 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6927
6928 if (IEM_IS_MODRM_REG_MODE(bRm))
6929 {
6930 /* register */
6931 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6933 IEM_MC_BEGIN(3, 0);
6934 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6935 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6936 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6937 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6938 IEM_MC_REF_EFLAGS(pEFlags);
6939 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6940 IEM_MC_ADVANCE_RIP_AND_FINISH();
6941 IEM_MC_END();
6942 }
6943 else
6944 {
6945 /* memory */
6946 IEM_MC_BEGIN(3, 2);
6947 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6948 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6949 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6950 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6951
6952 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6953 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6954 IEM_MC_ASSIGN(cShiftArg, cShift);
6955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6956 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6957 IEM_MC_FETCH_EFLAGS(EFlags);
6958 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6959
6960 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6961 IEM_MC_COMMIT_EFLAGS(EFlags);
6962 IEM_MC_ADVANCE_RIP_AND_FINISH();
6963 IEM_MC_END();
6964 }
6965}
6966
6967
6968/**
6969 * @opcode 0xc1
6970 */
6971FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
6972{
6973 IEMOP_HLP_MIN_186();
6974 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6975 PCIEMOPSHIFTSIZES pImpl;
6976 switch (IEM_GET_MODRM_REG_8(bRm))
6977 {
6978 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
6979 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
6980 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
6981 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
6982 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
6983 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
6984 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
6985 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6986 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
6987 }
6988 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6989
6990 if (IEM_IS_MODRM_REG_MODE(bRm))
6991 {
6992 /* register */
6993 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6995 switch (pVCpu->iem.s.enmEffOpSize)
6996 {
6997 case IEMMODE_16BIT:
6998 IEM_MC_BEGIN(3, 0);
6999 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7000 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7001 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7002 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7003 IEM_MC_REF_EFLAGS(pEFlags);
7004 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7005 IEM_MC_ADVANCE_RIP_AND_FINISH();
7006 IEM_MC_END();
7007 break;
7008
7009 case IEMMODE_32BIT:
7010 IEM_MC_BEGIN(3, 0);
7011 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7012 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7013 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7014 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7015 IEM_MC_REF_EFLAGS(pEFlags);
7016 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7017 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7018 IEM_MC_ADVANCE_RIP_AND_FINISH();
7019 IEM_MC_END();
7020 break;
7021
7022 case IEMMODE_64BIT:
7023 IEM_MC_BEGIN(3, 0);
7024 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7025 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7026 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7027 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7028 IEM_MC_REF_EFLAGS(pEFlags);
7029 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7030 IEM_MC_ADVANCE_RIP_AND_FINISH();
7031 IEM_MC_END();
7032 break;
7033
7034 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7035 }
7036 }
7037 else
7038 {
7039 /* memory */
7040 switch (pVCpu->iem.s.enmEffOpSize)
7041 {
7042 case IEMMODE_16BIT:
7043 IEM_MC_BEGIN(3, 2);
7044 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7045 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7046 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7047 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7048
7049 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7050 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7051 IEM_MC_ASSIGN(cShiftArg, cShift);
7052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7053 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7054 IEM_MC_FETCH_EFLAGS(EFlags);
7055 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7056
7057 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7058 IEM_MC_COMMIT_EFLAGS(EFlags);
7059 IEM_MC_ADVANCE_RIP_AND_FINISH();
7060 IEM_MC_END();
7061 break;
7062
7063 case IEMMODE_32BIT:
7064 IEM_MC_BEGIN(3, 2);
7065 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7066 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7067 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7068 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7069
7070 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7071 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7072 IEM_MC_ASSIGN(cShiftArg, cShift);
7073 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7074 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7075 IEM_MC_FETCH_EFLAGS(EFlags);
7076 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7077
7078 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7079 IEM_MC_COMMIT_EFLAGS(EFlags);
7080 IEM_MC_ADVANCE_RIP_AND_FINISH();
7081 IEM_MC_END();
7082 break;
7083
7084 case IEMMODE_64BIT:
7085 IEM_MC_BEGIN(3, 2);
7086 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7087 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7088 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7089 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7090
7091 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7092 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7093 IEM_MC_ASSIGN(cShiftArg, cShift);
7094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7095 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7096 IEM_MC_FETCH_EFLAGS(EFlags);
7097 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7098
7099 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7100 IEM_MC_COMMIT_EFLAGS(EFlags);
7101 IEM_MC_ADVANCE_RIP_AND_FINISH();
7102 IEM_MC_END();
7103 break;
7104
7105 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7106 }
7107 }
7108}
7109
7110
7111/**
7112 * @opcode 0xc2
7113 */
7114FNIEMOP_DEF(iemOp_retn_Iw)
7115{
7116 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
7117 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7118 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7120 switch (pVCpu->iem.s.enmEffOpSize)
7121 {
7122 case IEMMODE_16BIT:
7123 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_retn_iw_16, u16Imm);
7124 case IEMMODE_32BIT:
7125 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_retn_iw_32, u16Imm);
7126 case IEMMODE_64BIT:
7127 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_retn_iw_64, u16Imm);
7128 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7129 }
7130}
7131
7132
/**
 * @opcode 0xc3
 * Plain near return (RET).
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    /* Operand size defaults to 64-bit in long mode; intel ignores a 0x66 prefix here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Defer to the C implementation matching the effective operand size. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_retn_16);
        case IEMMODE_32BIT:
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_retn_32);
        case IEMMODE_64BIT:
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_retn_64);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7152
7153
/**
 * @opcode 0xc4
 * Either LES Gv,Mp (legacy/compat mode, MOD != 3) or the 3-byte VEX prefix.
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
{
    /* The LES instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || IEM_IS_MODRM_REG_MODE(bRm) )
    {
        IEMOP_MNEMONIC(vex3_prefix, "vex3");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
               the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* VEX.W is only honoured in 64-bit mode. */
            if ((bVex2 & 0x80 /* VEX.W */) && pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* Byte 1 carries the inverted REX.R/X/B bits; byte 2 the inverted vvvv,
               the L bit and the implied prefix (pp). */
            pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix = bVex2 & 0x3;

            /* Dispatch on the m-mmmm opcode-map field of VEX byte 1. */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 2: /* 0x0f 0x38 lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 3: /* 0x0f 0x3a lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                default:
                    /* NOTE(review): the log text says "vvvv" but bRm & 0x1f is the
                       m-mmmm map-select field - consider rewording the message. */
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        }
        Log(("VEX3: VEX support disabled!\n"));
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
7223
7224
/**
 * @opcode 0xc5
 * Either LDS Gv,Mp (legacy/compat mode, MOD != 3) or the 2-byte VEX prefix.
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
{
    /* The LDS instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
               the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* The single VEX byte packs inverted R, inverted vvvv, L and pp. */
            pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
            pVCpu->iem.s.idxPrefix = bRm & 0x3;

            /* 2-byte VEX always implies the 0x0f opcode map. */
#ifdef IEM_WITH_VEX
            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
            IEMOP_BITCH_ABOUT_STUB();
            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
        }

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        Log(("VEX2: VEX support disabled!\n"));
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
}
7268
7269
/**
 * @opcode 0xc6
 * Group 11: mov Eb,Ib is the only valid encoding (/0).
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* One immediate byte follows the effective address encoding. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7303
7304
/**
 * @opcode 0xc7
 * Group 11: mov Ev,Iz is the only valid encoding (/0).
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* 64-bit form takes a sign-extended 32-bit immediate. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Two immediate bytes follow the effective address encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Four immediate bytes follow the effective address encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Still a 4 byte (sign-extended) immediate in 64-bit mode. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7392
7393
7394
7395
/**
 * @opcode 0xc8
 * ENTER Iw,Ib - allocate a cbFrame byte stack frame with u8NestingLevel levels.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
7409
7410
/**
 * @opcode 0xc9
 * LEAVE - tear down the stack frame set up by ENTER.
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
7422
7423
/**
 * @opcode 0xca
 * Far return, releasing Iw bytes of caller stack arguments (RETF imm16).
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
7434
7435
/**
 * @opcode 0xcb
 * Plain far return (RETF) - same C implementation as 0xca with a zero pop count.
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
7445
7446
/**
 * @opcode 0xcc
 * INT3 - breakpoint; raised as \#BP via the common interrupt implementation.
 */
FNIEMOP_DEF(iemOp_int3)
{
    IEMOP_MNEMONIC(int3, "int3");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
}
7456
7457
/**
 * @opcode 0xcd
 * INT Ib - software interrupt with the vector given by the immediate byte.
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    IEMOP_MNEMONIC(int_Ib, "int Ib");
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, IEMINT_INTN);
}
7468
7469
/**
 * @opcode 0xce
 * INTO - invalid in 64-bit mode; defers to iemCImpl_int with vector \#OF
 * and the IEMINT_INTO flavor.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(IEMINT, enmInt, /*=*/ IEMINT_INTO, 1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, enmInt);
    IEM_MC_END();
}
7484
7485
/**
 * @opcode 0xcf
 * IRET - interrupt return, operand-size sensitive.
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
7495
7496
/**
 * @opcode 0xd0
 * Group 2 shift/rotate of a byte operand by a constant count of 1.
 * The /r field selects the operation; /6 is an invalid encoding.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the byte read/write, shift in place, then commit. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7553
7554
7555
/**
 * @opcode 0xd1
 * Group 2 shift/rotate of a word/dword/qword operand by a constant count of 1.
 * The /r field selects the operation; /6 is an invalid encoding.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map the operand read/write, shift in place, then commit. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7689
7690
/**
 * @opcode 0xd2
 * Group 2 shift/rotate of a byte operand with the count taken from CL.
 * The /r field selects the operation; /6 is an invalid encoding.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory: fetch CL, map the byte read/write, shift in place, commit. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7749
7750
/**
 * @opcode 0xd3
 * Group 2 shift/rotate of a word/dword/qword operand with the count from CL.
 * The /r field selects the operation; /6 is an invalid encoding.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: fetch CL, map the operand read/write, shift in place, commit. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7890
/**
 * @opcode 0xd4
 * AAM Ib - ASCII adjust after multiply; invalid in 64-bit mode.
 * A zero divisor immediate raises \#DE before deferring to the C impl.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
7904
7905
/**
 * @opcode 0xd5
 * AAD Ib - ASCII adjust before division; invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
7917
7918
/**
 * @opcode 0xd6
 * SALC (undocumented) - AL = CF ? 0xff : 0x00; invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC(salc, "salc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7937
7938
/**
 * @opcode 0xd7
 * XLAT - AL = [xBX + AL], using the effective segment and address size.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            /* 16-bit address variant of the byte fetch (wraps at 64KB). */
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7987
7988
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * Raises \#NM / FPU exceptions as appropriate before use; signals stack
 * underflow if either register is empty.
 *
 * @param   bRm         Mod R/M byte (the R/M field selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8019
8020
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags (FSW update, no value stored).
 *
 * @param   bRm         Mod R/M byte (the R/M field selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw);
    } IEM_MC_ELSE() {
        /* UINT8_MAX = no destination register to mark in the underflow handling. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8051
8052
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping the register stack when done.
 *
 * @param   bRm         Mod R/M byte (the R/M field selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
    } IEM_MC_ELSE() {
        /* UINT8_MAX = no destination register to mark in the underflow handling. */
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8083
8084
/** Opcode 0xd8 11/0. FADD ST(0),ST(i) - result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xd8 11/1. FMUL ST(0),ST(i) - result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xd8 11/2. FCOM ST(0),ST(i) - compare, only updates FSW. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/3. FCOMP ST(0),ST(i) - compare, update FSW, then pop. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/4. FSUB ST(0),ST(i) - result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xd8 11/5. FSUBR ST(0),ST(i) - reversed operands, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xd8 11/6. FDIV ST(0),ST(i) - result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}


/** Opcode 0xd8 11/7. FDIVR ST(0),ST(i) - reversed operands, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
8147
8148
8149/**
8150 * Common worker for FPU instructions working on ST0 and an m32r, and storing
8151 * the result in ST0.
8152 *
8153 * @param bRm Mod R/M byte.
8154 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8155 */
8156FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
8157{
8158 IEM_MC_BEGIN(3, 3);
8159 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8160 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8161 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
8162 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8163 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8164 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
8165
8166 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8168
8169 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8170 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8171 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8172
8173 IEM_MC_PREPARE_FPU_USAGE();
8174 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
8175 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
8176 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
8177 } IEM_MC_ELSE() {
8178 IEM_MC_FPU_STACK_UNDERFLOW(0);
8179 } IEM_MC_ENDIF();
8180 IEM_MC_ADVANCE_RIP_AND_FINISH();
8181
8182 IEM_MC_END();
8183}
8184
8185
/** Opcode 0xd8 !11/0. FADD ST(0),m32real - result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}


/** Opcode 0xd8 !11/1. FMUL ST(0),m32real - result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
8200
8201
/** Opcode 0xd8 !11/2. FCOM ST(0),m32real - compare, only updates FSW. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* The _MEM_OP variants also record the data pointer (FPUDP/FPUDS). */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8233
8234
/** Opcode 0xd8 !11/3. FCOMP ST(0),m32real - compare, update FSW, then pop. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* Same as FCOM m32r, but pops ST(0) after the FSW update. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8266
8267
/** Opcode 0xd8 !11/4. FSUB ST(0),m32real - result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5. FSUBR ST(0),m32real - reversed operands, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6. FDIV ST(0),m32real - result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7. FDIVR ST(0),m32real - reversed operands, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
8298
8299
8300/**
8301 * @opcode 0xd8
8302 */
8303FNIEMOP_DEF(iemOp_EscF0)
8304{
8305 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8306 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
8307
8308 if (IEM_IS_MODRM_REG_MODE(bRm))
8309 {
8310 switch (IEM_GET_MODRM_REG_8(bRm))
8311 {
8312 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
8313 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
8314 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
8315 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
8316 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
8317 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
8318 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
8319 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
8320 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8321 }
8322 }
8323 else
8324 {
8325 switch (IEM_GET_MODRM_REG_8(bRm))
8326 {
8327 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
8328 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
8329 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
8330 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
8331 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
8332 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
8333 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
8334 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
8335 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8336 }
8337 }
8338}
8339
8340
/** Opcode 0xd9 /0 mem32real
 * FLD m32real - converts the 32-bit real to 80-bit and pushes it.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) is the register that becomes ST(0) after the push; it must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8372
8373
/** Opcode 0xd9 !11/2 mem32real
 * FST m32real - stores ST(0) to memory as a 32-bit real (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination up front so both the normal and the QNaN path can write it. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Empty ST(0): with the invalid-operation exception masked, store the default QNaN. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8407
8408
/** Opcode 0xd9 !11/3
 * FSTP m32real - stores ST(0) to memory as a 32-bit real and pops. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination up front so both the normal and the QNaN path can write it. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Empty ST(0): with the invalid-operation exception masked, store the default QNaN. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8442
8443
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte - loads the FPU environment; deferred to the C
 * implementation since the layout depends on the effective operand size. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
8460
8461
8462/** Opcode 0xd9 !11/5 */
8463FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
8464{
8465 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
8466 IEM_MC_BEGIN(1, 1);
8467 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8468 IEM_MC_ARG(uint16_t, u16Fsw, 0);
8469 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8471 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8472 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8473 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8474 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
8475 IEM_MC_END();
8476}
8477
8478
/** Opcode 0xd9 !11/6
 * FNSTENV m14/m28byte - stores the FPU environment; deferred to the C
 * implementation since the layout depends on the effective operand size. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
8495
8496
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte - stores the current FPU control word to memory. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
8513
8514
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP - no operation on the FPU data, but still updates FOP/FPUIP. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     * intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
8531
8532
/** Opcode 0xd9 11/0 stN
 * FLD ST(i) - pushes a copy of ST(i) onto the stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel docs do not mention it,
     * while AMD indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(); /* loading from an empty register */
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8558
8559
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i) - exchanges ST(0) and ST(i). */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel docs do not mention it,
     * while AMD indicates that it does. */
    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        /* Swap via the result mechanism: ST(i) gets the old ST(0), FpuRes (with C1 set) goes to ST(0). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    } IEM_MC_ELSE() {
        /* One or both registers empty: let the C implementation sort out the underflow. */
        IEM_MC_CALL_CIMPL_2(iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8589
8590
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i) - copies ST(0) to ST(i) and pops. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        /* Destination is ST(0) itself: nothing to copy, just pop (or underflow). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8636
8637
8638/**
8639 * Common worker for FPU instructions working on ST0 and replaces it with the
8640 * result, i.e. unary operators.
8641 *
8642 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8643 */
8644FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
8645{
8646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8647
8648 IEM_MC_BEGIN(2, 1);
8649 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8650 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8651 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
8652
8653 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8654 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8655 IEM_MC_PREPARE_FPU_USAGE();
8656 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
8657 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
8658 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
8659 } IEM_MC_ELSE() {
8660 IEM_MC_FPU_STACK_UNDERFLOW(0);
8661 } IEM_MC_ENDIF();
8662 IEM_MC_ADVANCE_RIP_AND_FINISH();
8663
8664 IEM_MC_END();
8665}
8666
8667
/** Opcode 0xd9 0xe0. FCHS - changes the sign of ST(0). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1. FABS - clears the sign of ST(0). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
8682
8683
/** Opcode 0xd9 0xe4.
 * FTST - compares ST(0) against 0.0, only updating FSW. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX); /* UINT8_MAX = no destination register to mark */
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8708
8709
/** Opcode 0xd9 0xe5.
 * FXAM - classifies ST(0) into the C0-C3 condition flags. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Note: no emptiness check here - FXAM also classifies an empty register. */
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8731
8732
8733/**
8734 * Common worker for FPU instructions pushing a constant onto the FPU stack.
8735 *
8736 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8737 */
8738FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
8739{
8740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8741
8742 IEM_MC_BEGIN(1, 1);
8743 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8744 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8745
8746 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8747 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8748 IEM_MC_PREPARE_FPU_USAGE();
8749 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
8750 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
8751 IEM_MC_PUSH_FPU_RESULT(FpuRes);
8752 } IEM_MC_ELSE() {
8753 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
8754 } IEM_MC_ENDIF();
8755 IEM_MC_ADVANCE_RIP_AND_FINISH();
8756
8757 IEM_MC_END();
8758}
8759
8760
/** Opcode 0xd9 0xe8. FLD1 - push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9. FLDL2T - push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea. FLDL2E - push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb. FLDPI - push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec. FLDLG2 - push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed. FLDLN2 - push loge(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee. FLDZ - push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
8813
8814
/** Opcode 0xd9 0xf0.
 *
 * F2XM1 - replaces ST(0) with 2^ST(0) - 1.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
8828
8829
8830/**
8831 * Common worker for FPU instructions working on STn and ST0, storing the result
8832 * in STn, and popping the stack unless IE, DE or ZE was raised.
8833 *
8834 * @param bRm Mod R/M byte.
8835 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8836 */
8837FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
8838{
8839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8840
8841 IEM_MC_BEGIN(3, 1);
8842 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8843 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8844 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8845 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8846
8847 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8848 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8849
8850 IEM_MC_PREPARE_FPU_USAGE();
8851 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
8852 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
8853 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm));
8854 } IEM_MC_ELSE() {
8855 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm));
8856 } IEM_MC_ENDIF();
8857 IEM_MC_ADVANCE_RIP_AND_FINISH();
8858
8859 IEM_MC_END();
8860}
8861
8862
/** Opcode 0xd9 0xf1.
 * FYL2X - ST(1) = ST(1) * log2(ST(0)), then pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
8869
8870
8871/**
8872 * Common worker for FPU instructions working on ST0 and having two outputs, one
8873 * replacing ST0 and one pushed onto the stack.
8874 *
8875 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8876 */
8877FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
8878{
8879 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8880
8881 IEM_MC_BEGIN(2, 1);
8882 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
8883 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
8884 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
8885
8886 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8887 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8888 IEM_MC_PREPARE_FPU_USAGE();
8889 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
8890 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
8891 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
8892 } IEM_MC_ELSE() {
8893 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
8894 } IEM_MC_ENDIF();
8895 IEM_MC_ADVANCE_RIP_AND_FINISH();
8896
8897 IEM_MC_END();
8898}
8899
8900
/** Opcode 0xd9 0xf2. FPTAN - partial tangent of ST(0), pushes a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}


/** Opcode 0xd9 0xf3. FPATAN - ST(1) = arctan(ST(1)/ST(0)), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4. FXTRACT - splits ST(0) into exponent and significand. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5. FPREM1 - IEEE partial remainder of ST(0)/ST(1). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
8931
8932
/** Opcode 0xd9 0xf6.
 * FDECSTP - decrements the FPU stack TOP without touching register contents. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears C0-C3 */

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8954
8955
/** Opcode 0xd9 0xf7.
 * FINCSTP - increments the FPU stack TOP without touching register contents. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears C0-C3 */

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8977
8978
/** Opcode 0xd9 0xf8. FPREM - partial remainder (truncating) of ST(0)/ST(1). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}


/** Opcode 0xd9 0xf9. FYL2XP1 - ST(1) = ST(1) * log2(ST(0) + 1.0), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}


/** Opcode 0xd9 0xfa. FSQRT - square root of ST(0). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}


/** Opcode 0xd9 0xfb. FSINCOS - sine and cosine of ST(0), pushes a second result. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}


/** Opcode 0xd9 0xfc. FRNDINT - rounds ST(0) to integer per the FCW rounding mode. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}


/** Opcode 0xd9 0xfd. FSCALE - scales ST(0) by a power of two taken from ST(1). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}


/** Opcode 0xd9 0xfe. FSIN - sine of ST(0). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}


/** Opcode 0xd9 0xff. FCOS - cosine of ST(0). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
9041
9042
/** Used by iemOp_EscF1.
 * Dispatch table for 0xd9 register-form encodings 0xe0..0xff, indexed by
 * (opcode byte - 0xe0); invalid encodings map to iemOp_Invalid. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
9079
9080
9081/**
9082 * @opcode 0xd9
9083 */
9084FNIEMOP_DEF(iemOp_EscF1)
9085{
9086 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9087 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
9088
9089 if (IEM_IS_MODRM_REG_MODE(bRm))
9090 {
9091 switch (IEM_GET_MODRM_REG_8(bRm))
9092 {
9093 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
9094 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
9095 case 2:
9096 if (bRm == 0xd0)
9097 return FNIEMOP_CALL(iemOp_fnop);
9098 return IEMOP_RAISE_INVALID_OPCODE();
9099 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
9100 case 4:
9101 case 5:
9102 case 6:
9103 case 7:
9104 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
9105 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
9106 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9107 }
9108 }
9109 else
9110 {
9111 switch (IEM_GET_MODRM_REG_8(bRm))
9112 {
9113 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
9114 case 1: return IEMOP_RAISE_INVALID_OPCODE();
9115 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
9116 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
9117 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
9118 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
9119 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
9120 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
9121 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9122 }
9123 }
9124}
9125
9126
/**
 * Opcode 0xda 11/0 - FCMOVB ST(0),ST(i).
 *
 * Copies ST(i) to ST(0) when CF is set; signals FPU stack underflow when
 * either register is empty.
 */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        /* Condition: CF set (below). */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9152
9153
/**
 * Opcode 0xda 11/1 - FCMOVE ST(0),ST(i).
 *
 * Copies ST(i) to ST(0) when ZF is set; signals FPU stack underflow when
 * either register is empty.
 */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        /* Condition: ZF set (equal). */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9179
9180
/**
 * Opcode 0xda 11/2 - FCMOVBE ST(0),ST(i).
 *
 * Copies ST(i) to ST(0) when CF or ZF is set; signals FPU stack underflow
 * when either register is empty.
 */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        /* Condition: CF or ZF set (below or equal). */
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9206
9207
/**
 * Opcode 0xda 11/3 - FCMOVU ST(0),ST(i).
 *
 * Copies ST(i) to ST(0) when PF is set (unordered); signals FPU stack
 * underflow when either register is empty.
 */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        /* Condition: PF set (unordered result). */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9233
9234
9235/**
9236 * Common worker for FPU instructions working on ST0 and ST1, only affecting
9237 * flags, and popping twice when done.
9238 *
9239 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9240 */
9241FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
9242{
9243 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9244
9245 IEM_MC_BEGIN(3, 1);
9246 IEM_MC_LOCAL(uint16_t, u16Fsw);
9247 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9248 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9249 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9250
9251 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9252 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9253
9254 IEM_MC_PREPARE_FPU_USAGE();
9255 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
9256 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
9257 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
9258 } IEM_MC_ELSE() {
9259 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
9260 } IEM_MC_ENDIF();
9261 IEM_MC_ADVANCE_RIP_AND_FINISH();
9262
9263 IEM_MC_END();
9264}
9265
9266
/** Opcode 0xda 0xe9 - FUCOMPP: unordered compare of st0 and st1, pop both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    /* Only FSW is affected; the double pop is handled by the worker. */
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
9273
9274
9275/**
9276 * Common worker for FPU instructions working on ST0 and an m32i, and storing
9277 * the result in ST0.
9278 *
9279 * @param bRm Mod R/M byte.
9280 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9281 */
9282FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
9283{
9284 IEM_MC_BEGIN(3, 3);
9285 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9286 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9287 IEM_MC_LOCAL(int32_t, i32Val2);
9288 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9289 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9290 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
9291
9292 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9294
9295 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9296 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9297 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9298
9299 IEM_MC_PREPARE_FPU_USAGE();
9300 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
9301 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
9302 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
9303 } IEM_MC_ELSE() {
9304 IEM_MC_FPU_STACK_UNDERFLOW(0);
9305 } IEM_MC_ENDIF();
9306 IEM_MC_ADVANCE_RIP_AND_FINISH();
9307
9308 IEM_MC_END();
9309}
9310
9311
/** Opcode 0xda !11/0 - FIADD m32i: st0 := st0 + m32i. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
9318
9319
/** Opcode 0xda !11/1 - FIMUL m32i: st0 := st0 * m32i. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
9326
9327
/**
 * Opcode 0xda !11/2 - FICOM st0,m32i.
 *
 * Compares ST0 with a 32-bit signed integer from memory; only FSW is
 * updated (no result stored, no pop).  Signals FPU stack underflow if ST0
 * is empty.
 */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9359
9360
/**
 * Opcode 0xda !11/3 - FICOMP st0,m32i.
 *
 * Same as FICOM m32i but pops ST0 afterwards (note the _THEN_POP variants
 * of the FSW update / underflow macros).
 */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9392
9393
/** Opcode 0xda !11/4 - FISUB m32i: st0 := st0 - m32i. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
9400
9401
/** Opcode 0xda !11/5 - FISUBR m32i: st0 := m32i - st0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
9408
9409
/** Opcode 0xda !11/6 - FIDIV m32i: st0 := st0 / m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
9416
9417
/** Opcode 0xda !11/7 - FIDIVR m32i: st0 := m32i / st0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
9424
9425
9426/**
9427 * @opcode 0xda
9428 */
9429FNIEMOP_DEF(iemOp_EscF2)
9430{
9431 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9432 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
9433 if (IEM_IS_MODRM_REG_MODE(bRm))
9434 {
9435 switch (IEM_GET_MODRM_REG_8(bRm))
9436 {
9437 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
9438 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
9439 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
9440 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
9441 case 4: return IEMOP_RAISE_INVALID_OPCODE();
9442 case 5:
9443 if (bRm == 0xe9)
9444 return FNIEMOP_CALL(iemOp_fucompp);
9445 return IEMOP_RAISE_INVALID_OPCODE();
9446 case 6: return IEMOP_RAISE_INVALID_OPCODE();
9447 case 7: return IEMOP_RAISE_INVALID_OPCODE();
9448 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9449 }
9450 }
9451 else
9452 {
9453 switch (IEM_GET_MODRM_REG_8(bRm))
9454 {
9455 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
9456 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
9457 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
9458 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
9459 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
9460 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
9461 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
9462 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
9463 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9464 }
9465 }
9466}
9467
9468
/**
 * Opcode 0xdb !11/0 - FILD m32i.
 *
 * Loads a 32-bit signed integer from memory, converts it to R80 and pushes
 * it onto the FPU stack.  Signals stack push overflow if the register that
 * would become the new top (ST7-relative) is occupied.
 */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val,  i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires the register that will become the new TOS (index 7 relative) to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9499
9500
/**
 * Opcode 0xdb !11/1 - FISTTP m32i (SSE3).
 *
 * Stores ST0 to memory as a 32-bit integer using truncation, then pops.
 * On stack underflow with the invalid-op exception masked (FCW.IM), stores
 * the integer-indefinite value instead.
 */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9534
9535
/**
 * Opcode 0xdb !11/2 - FIST m32i.
 *
 * Stores ST0 to memory as a 32-bit integer (rounded per FCW); does not pop.
 * On stack underflow with the invalid-op exception masked (FCW.IM), stores
 * the integer-indefinite value instead.
 */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9569
9570
/**
 * Opcode 0xdb !11/3 - FISTP m32i.
 *
 * Same as FIST m32i but pops ST0 afterwards (note the _THEN_POP variants
 * of the FSW update / underflow macros).
 */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9604
9605
/**
 * Opcode 0xdb !11/5 - FLD m80r.
 *
 * Loads an 80-bit real from memory and pushes it onto the FPU stack.
 * Signals stack push overflow if the register that would become the new
 * top (ST7-relative) is occupied.
 */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U,            r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U,  pr80Val,    r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires the register that will become the new TOS (index 7 relative) to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9636
9637
/**
 * Opcode 0xdb !11/7 - FSTP m80r.
 *
 * Stores ST0 to memory as an 80-bit real, then pops.  On stack underflow
 * with the invalid-op exception masked (FCW.IM), stores negative QNaN
 * (real indefinite) instead.
 */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U,             pr80Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Explicit mapping with 7-byte alignment mask; faults before any FPU state change. */
    IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9671
9672
/**
 * Opcode 0xdb 11/0 - FCMOVNB ST(0),ST(i).
 *
 * Copies ST(i) to ST(0) when CF is clear; signals FPU stack underflow when
 * either register is empty.
 */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        /* Condition: CF clear (not below). */
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9698
9699
/**
 * Opcode 0xdb 11/1 - FCMOVNE ST(0),ST(i).
 *
 * Copies ST(i) to ST(0) when ZF is clear; signals FPU stack underflow when
 * either register is empty.
 */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        /* Condition: ZF clear (not equal). */
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9725
9726
/**
 * Opcode 0xdb 11/2 - FCMOVNBE ST(0),ST(i).
 *
 * Copies ST(i) to ST(0) when both CF and ZF are clear; signals FPU stack
 * underflow when either register is empty.
 */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        /* Condition: neither CF nor ZF set (not below or equal). */
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9752
9753
/**
 * Opcode 0xdb 11/3 - FCMOVNU ST(0),ST(i).
 *
 * Copies ST(i) to ST(0) when PF is clear (not unordered); signals FPU
 * stack underflow when either register is empty.
 */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        /* Condition: PF clear (ordered result). */
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9779
9780
/** Opcode 0xdb 0xe0 - FNENI: 8087 interrupt enable; treated as a no-op
 *  here, only the device-not-available check is performed. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9791
9792
/** Opcode 0xdb 0xe1 - FNDISI: 8087 interrupt disable; treated as a no-op
 *  here, only the device-not-available check is performed. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9803
9804
/** Opcode 0xdb 0xe2 - FNCLEX: clears the FSW exception flags without
 *  checking for pending FPU exceptions first. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9818
9819
/** Opcode 0xdb 0xe3 - FNINIT: re-initializes the FPU; deferred to the
 *  C implementation without checking for pending exceptions (fCheckXcpts=false). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
9827
9828
/** Opcode 0xdb 0xe4 - FNSETPM: 80287 "set protected mode"; treated as a
 *  no-op here, only the device-not-available check is performed. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9839
9840
/** Opcode 0xdb 0xe5 - FRSTPM: 80287XL "reset protected mode"; raises \#UD
 *  like newer CPUs do (the no-op variant is disabled below). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
9856
9857
9858/** Opcode 0xdb 11/5. */
9859FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
9860{
9861 IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
9862 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fucomi_r80_by_r80,
9863 0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
9864}
9865
9866
/**
 * Opcode 0xdb 11/6 - FCOMI st0,stN.
 *
 * Ordered compare of ST0 with ST(i), setting EFLAGS; deferred to the
 * shared fcomi/fucomi C implementation without popping.  The FPU opcode
 * recorded by the decoder is passed along in the low bits of the third
 * argument (fPop occupies a separate bit and is false here).
 */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
                                   false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
9874
9875
9876/**
9877 * @opcode 0xdb
9878 */
9879FNIEMOP_DEF(iemOp_EscF3)
9880{
9881 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9882 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
9883 if (IEM_IS_MODRM_REG_MODE(bRm))
9884 {
9885 switch (IEM_GET_MODRM_REG_8(bRm))
9886 {
9887 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
9888 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
9889 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
9890 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
9891 case 4:
9892 switch (bRm)
9893 {
9894 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
9895 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
9896 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
9897 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
9898 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
9899 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
9900 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
9901 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
9902 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9903 }
9904 break;
9905 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
9906 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
9907 case 7: return IEMOP_RAISE_INVALID_OPCODE();
9908 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9909 }
9910 }
9911 else
9912 {
9913 switch (IEM_GET_MODRM_REG_8(bRm))
9914 {
9915 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
9916 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
9917 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
9918 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
9919 case 4: return IEMOP_RAISE_INVALID_OPCODE();
9920 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
9921 case 6: return IEMOP_RAISE_INVALID_OPCODE();
9922 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
9923 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9924 }
9925 }
9926}
9927
9928
9929/**
9930 * Common worker for FPU instructions working on STn and ST0, and storing the
9931 * result in STn unless IE, DE or ZE was raised.
9932 *
9933 * @param bRm Mod R/M byte.
9934 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9935 */
9936FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
9937{
9938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9939
9940 IEM_MC_BEGIN(3, 1);
9941 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9942 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9943 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9944 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9945
9946 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9947 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9948
9949 IEM_MC_PREPARE_FPU_USAGE();
9950 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
9951 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
9952 IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm));
9953 } IEM_MC_ELSE() {
9954 IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm));
9955 } IEM_MC_ENDIF();
9956 IEM_MC_ADVANCE_RIP_AND_FINISH();
9957
9958 IEM_MC_END();
9959}
9960
9961
/** Opcode 0xdc 11/0 - FADD stN,st0: st(i) := st(i) + st0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
9968
9969
/** Opcode 0xdc 11/1 - FMUL stN,st0: st(i) := st(i) * st0. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
9976
9977
/** Opcode 0xdc 11/4 - FSUBR stN,st0: st(i) := st0 - st(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
9984
9985
/** Opcode 0xdc 11/5 - FSUB stN,st0: st(i) := st(i) - st0. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
9992
9993
/** Opcode 0xdc 11/6 - FDIVR stN,st0: st(i) := st0 / st(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
10000
10001
/** Opcode 0xdc 11/7 - FDIV stN,st0: st(i) := st(i) / st0. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
10008
10009
10010/**
10011 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
10012 * memory operand, and storing the result in ST0.
10013 *
10014 * @param bRm Mod R/M byte.
10015 * @param pfnImpl Pointer to the instruction implementation (assembly).
10016 */
10017FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
10018{
10019 IEM_MC_BEGIN(3, 3);
10020 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10021 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10022 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
10023 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10024 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
10025 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
10026
10027 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10029 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10030 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10031
10032 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10033 IEM_MC_PREPARE_FPU_USAGE();
10034 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
10035 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
10036 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10037 } IEM_MC_ELSE() {
10038 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10039 } IEM_MC_ENDIF();
10040 IEM_MC_ADVANCE_RIP_AND_FINISH();
10041
10042 IEM_MC_END();
10043}
10044
10045
/** Opcode 0xdc !11/0.
 * FADD m64real - forwards to the common ST0/m64r worker with the
 * r80-by-r64 add implementation. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
10052
10053
/** Opcode 0xdc !11/1.
 * FMUL m64real - forwards to the common ST0/m64r worker with the
 * r80-by-r64 multiply implementation. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
10060
10061
/** Opcode 0xdc !11/2.
 * FCOM ST0,m64real - compares ST0 against a 64-bit floating point memory
 * operand and updates FSW; no register is written and nothing is popped. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    /* Decode the memory operand and perform the may-raise checks. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        /* The comparison only produces a new FSW value. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10093
10094
/** Opcode 0xdc !11/3.
 * FCOMP ST0,m64real - same comparison as FCOM m64r (shares
 * iemAImpl_fcom_r80_by_r64) but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    /* Decode the memory operand and perform the may-raise checks. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        /* _THEN_POP variants pop the stack after the FSW update. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10126
10127
/** Opcode 0xdc !11/4.
 * FSUB m64real - forwards to the common ST0/m64r worker with the
 * r80-by-r64 subtract implementation. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
10134
10135
/** Opcode 0xdc !11/5.
 * FSUBR m64real - forwards to the common ST0/m64r worker with the
 * reversed-subtract r80-by-r64 implementation. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
10142
10143
/** Opcode 0xdc !11/6.
 * FDIV m64real - forwards to the common ST0/m64r worker with the
 * r80-by-r64 divide implementation. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
10150
10151
/** Opcode 0xdc !11/7.
 * FDIVR m64real - forwards to the common ST0/m64r worker with the
 * reversed-divide r80-by-r64 implementation. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
10158
10159
10160/**
10161 * @opcode 0xdc
10162 */
10163FNIEMOP_DEF(iemOp_EscF4)
10164{
10165 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10166 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
10167 if (IEM_IS_MODRM_REG_MODE(bRm))
10168 {
10169 switch (IEM_GET_MODRM_REG_8(bRm))
10170 {
10171 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
10172 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
10173 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
10174 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
10175 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
10176 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
10177 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
10178 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
10179 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10180 }
10181 }
10182 else
10183 {
10184 switch (IEM_GET_MODRM_REG_8(bRm))
10185 {
10186 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
10187 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
10188 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
10189 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
10190 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
10191 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
10192 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
10193 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
10194 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10195 }
10196 }
10197}
10198
10199
/** Opcode 0xdd !11/0.
 * FLD m64real - converts a 64-bit real from memory to 80-bit and pushes it
 * onto the FPU stack; raises stack overflow if ST7 (the incoming slot) is
 * occupied.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    /* Decode the memory operand and perform the may-raise checks. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        /* Register 7 is the slot that becomes ST0 after the push. */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10230
10231
/** Opcode 0xdd !11/1.
 * FISTTP m64int - stores ST0 to memory as a 64-bit integer using truncation
 * and pops the stack.  On an empty ST0, stores the integer-indefinite value
 * (INT64_MIN) when the invalid-operation exception is masked (FCW.IM). */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before the FPU work so memory faults
       are raised up front. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        /* Commit is conditional on the FSW result (unmasked exceptions skip it). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10265
10266
/** Opcode 0xdd !11/2.
 * FST m64real - stores ST0 to memory as a 64-bit real without popping.
 * On an empty ST0, stores negative QNaN when FCW.IM is masked. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10300
10301
10302
10303
/** Opcode 0xdd !11/3.
 * FSTP m64real - identical to FST m64real (shares iemAImpl_fst_r80_to_r64)
 * but pops the register stack after the store / underflow handling. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        /* _THEN_POP: pop the stack after updating FSW. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10337
10338
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte - restores the full FPU state from memory; the heavy
 * lifting is deferred to the C implementation (iemCImpl_frstor), which gets
 * the effective operand size, segment and address. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
10355
10356
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte - saves the full FPU state to memory via the C
 * implementation (iemCImpl_fnsave). */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
10373
/** Opcode 0xdd !11/7.
 * FNSTSW m16 - stores the FPU status word to a 16-bit memory location
 * without checking for pending FPU exceptions. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
10397
10398
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - marks the given register as empty and updates FOP/FIP. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
       unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10419
10420
/** Opcode 0xdd 11/2.
 * FST ST(i) - copies ST(0) into ST(i); underflow is signalled if ST(0)
 * is empty.  Uses a zero FSW for the result (no arithmetic performed). */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm));
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm));
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10444
10445
/** Opcode 0xdd 11/4.
 * FUCOM ST0,ST(i) - unordered compare; forwards to the no-store
 * ST0/ST(i) worker with the fucom implementation. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
10452
10453
/** Opcode 0xdd 11/5.
 * FUCOMP ST0,ST(i) - unordered compare with pop; uses the popping variant
 * of the no-store ST0/ST(i) worker. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
10460
10461
10462/**
10463 * @opcode 0xdd
10464 */
10465FNIEMOP_DEF(iemOp_EscF5)
10466{
10467 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10468 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
10469 if (IEM_IS_MODRM_REG_MODE(bRm))
10470 {
10471 switch (IEM_GET_MODRM_REG_8(bRm))
10472 {
10473 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
10474 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
10475 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
10476 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
10477 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
10478 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
10479 case 6: return IEMOP_RAISE_INVALID_OPCODE();
10480 case 7: return IEMOP_RAISE_INVALID_OPCODE();
10481 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10482 }
10483 }
10484 else
10485 {
10486 switch (IEM_GET_MODRM_REG_8(bRm))
10487 {
10488 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
10489 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
10490 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
10491 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
10492 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
10493 case 5: return IEMOP_RAISE_INVALID_OPCODE();
10494 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
10495 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
10496 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10497 }
10498 }
10499}
10500
10501
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0) - add with pop; forwards to the popping ST(i),ST(0)
 * worker with the r80-by-r80 add implementation. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
10508
10509
/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0) - multiply with pop; forwards to the popping
 * ST(i),ST(0) worker with the r80-by-r80 multiply implementation. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
10516
10517
/** Opcode 0xde 0xd9.
 * FCOMPP - compares ST0 with ST1 and pops both; forwards to the
 * no-store double-pop worker with the fcom implementation. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
10524
10525
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0) - reversed subtract with pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
10532
10533
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0) - subtract with pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
10540
10541
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0) - reversed divide with pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
10548
10549
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0) - divide with pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
10556
10557
10558/**
10559 * Common worker for FPU instructions working on ST0 and an m16i, and storing
10560 * the result in ST0.
10561 *
10562 * @param bRm Mod R/M byte.
10563 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10564 */
10565FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
10566{
10567 IEM_MC_BEGIN(3, 3);
10568 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10569 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10570 IEM_MC_LOCAL(int16_t, i16Val2);
10571 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10572 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10573 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
10574
10575 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10577
10578 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10579 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10580 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10581
10582 IEM_MC_PREPARE_FPU_USAGE();
10583 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10584 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
10585 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
10586 } IEM_MC_ELSE() {
10587 IEM_MC_FPU_STACK_UNDERFLOW(0);
10588 } IEM_MC_ENDIF();
10589 IEM_MC_ADVANCE_RIP_AND_FINISH();
10590
10591 IEM_MC_END();
10592}
10593
10594
/** Opcode 0xde !11/0.
 * FIADD m16int - forwards to the common ST0/m16i worker with the
 * integer add implementation. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
10601
10602
/** Opcode 0xde !11/1.
 * FIMUL m16int - forwards to the common ST0/m16i worker with the
 * integer multiply implementation. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
10609
10610
/** Opcode 0xde !11/2.
 * FICOM ST0,m16int - compares ST0 against a signed 16-bit integer memory
 * operand and updates FSW; nothing is stored or popped. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10642
10643
/** Opcode 0xde !11/3.
 * FICOMP ST0,m16int - same comparison as FICOM m16i (shares
 * iemAImpl_ficom_r80_by_i16) but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        /* _THEN_POP: pop the stack after the FSW update. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10675
10676
/** Opcode 0xde !11/4.
 * FISUB m16int - forwards to the common ST0/m16i worker with the
 * integer subtract implementation. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
10683
10684
/** Opcode 0xde !11/5.
 * FISUBR m16int - forwards to the common ST0/m16i worker with the
 * reversed integer subtract implementation. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
10691
10692
/** Opcode 0xde !11/6.
 * FIDIV m16int - forwards to the common ST0/m16i worker with the
 * integer divide implementation. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
10699
10700
/** Opcode 0xde !11/7.
 * FIDIVR m16int - forwards to the common ST0/m16i worker with the
 * reversed integer divide implementation. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
10707
10708
10709/**
10710 * @opcode 0xde
10711 */
10712FNIEMOP_DEF(iemOp_EscF6)
10713{
10714 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10715 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
10716 if (IEM_IS_MODRM_REG_MODE(bRm))
10717 {
10718 switch (IEM_GET_MODRM_REG_8(bRm))
10719 {
10720 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
10721 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
10722 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
10723 case 3: if (bRm == 0xd9)
10724 return FNIEMOP_CALL(iemOp_fcompp);
10725 return IEMOP_RAISE_INVALID_OPCODE();
10726 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
10727 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
10728 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
10729 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
10730 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10731 }
10732 }
10733 else
10734 {
10735 switch (IEM_GET_MODRM_REG_8(bRm))
10736 {
10737 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
10738 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
10739 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
10740 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
10741 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
10742 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
10743 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
10744 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
10745 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10746 }
10747 }
10748}
10749
10750
/** Opcode 0xdf 11/0.
 * FFREEP ST(i) - undocumented instruction, assumed to work like
 * FFREE + FINCSTP: frees the register, then increments the stack top. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP(); /* the implicit pop */
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10771
10772
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - stores the FPU status word in AX without checking for
 * pending FPU exceptions. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10788
10789
/** Opcode 0xdf 11/5.
 * FUCOMIP ST0,ST(i) - unordered compare setting EFLAGS, with pop.
 * Deferred to the common C implementation with the pop flag set (bit 31).
 * NOTE(review): passes iemAImpl_fcomi_r80_by_r80, the same worker as
 * FCOMIP below; if a dedicated unordered (fucomi) worker exists it may be
 * intended here - confirm against IEMAllAImpl. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
                                   RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
10797
10798
/** Opcode 0xdf 11/6.
 * FCOMIP ST0,ST(i) - ordered compare setting EFLAGS, with pop.
 * Deferred to the common C implementation with the pop flag set (bit 31). */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
                                   RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
10806
10807
/** Opcode 0xdf !11/0.
 * FILD m16int - converts a signed 16-bit integer from memory to 80-bit real
 * and pushes it; raises stack overflow if the incoming slot (register 7)
 * is occupied. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10838
10839
/** Opcode 0xdf !11/1.
 * FISTTP m16int - stores ST0 to memory as a 16-bit integer using truncation
 * and pops.  On an empty ST0, stores the integer-indefinite value (INT16_MIN)
 * when FCW.IM is masked. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before the FPU work. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10873
10874
/** Opcode 0xdf !11/2.
 * FIST m16int - stores ST0 to memory as a 16-bit integer (rounded per FCW)
 * without popping.  On an empty ST0, stores INT16_MIN (integer indefinite)
 * when FCW.IM is masked. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10908
10909
/** Opcode 0xdf !11/3.
 * FISTP m16int - identical to FIST m16int (shares iemAImpl_fist_r80_to_i16)
 * but pops the register stack after the store / underflow handling. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        /* _THEN_POP: pop the stack after updating FSW. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10943
10944
/** Opcode 0xdf !11/4.
 * FBLD m80bcd - load an 80-bit packed BCD value from memory, convert it to
 * real80 and push it onto the FPU register stack. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the BCD operand before touching the FPU stack. */
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs ST(7) free; otherwise it is a stack (push) overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10975
10976
/** Opcode 0xdf !11/5.
 * FILD m64i - load a 64-bit signed integer from memory, convert it to real80
 * and push it onto the FPU register stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the integer operand before touching the FPU stack. */
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs ST(7) free; otherwise it is a stack (push) overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11007
11008
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd - store ST(0) to memory as an 80-bit packed BCD value, then
 * pop the FPU register stack. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Explicit-size mapping of the 10-byte BCD destination.
       NOTE(review): 7 /-cbAlign-/ differs from sizeof(*pd80Dst); presumably an
       alignment constraint for the 80-bit access - confirm against IEM_MC_MEM_MAP_EX. */
    IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* ST(0) valid: convert to packed BCD, commit, update FSW and pop. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Stack underflow: store the BCD indefinite value if FCW.IM, then record underflow + pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11042
11043
/** Opcode 0xdf !11/7.
 * FISTP m64i - store ST(0) to memory as a 64-bit signed integer, then pop
 * the FPU register stack. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before any FPU state is modified. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* ST(0) valid: convert to int64, commit, update FSW and pop. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Stack underflow: store integer indefinite if FCW.IM, then record underflow + pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11077
11078
/**
 * @opcode 0xdf
 *
 * FPU escape 0xdf: dispatches on the ModR/M byte - register forms
 * (mod == 3) vs. memory forms, selected by the reg field.
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* FNSTSW AX is only valid for the 0xdf 0xe0 encoding. */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms: 16-bit integer loads/stores, 80-bit BCD and 64-bit integer. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11118
11119
/**
 * @opcode 0xe0
 *
 * LOOPNE/LOOPNZ Jb: decrement the counter register (CX/ECX/RCX per the
 * effective address size) and take the short branch when the result is
 * non-zero and ZF is clear.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address size prefix selects which counter register is used. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11168
11169
/**
 * @opcode 0xe1
 *
 * LOOPE/LOOPZ Jb: decrement the counter register (CX/ECX/RCX per the
 * effective address size) and take the short branch when the result is
 * non-zero and ZF is set.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address size prefix selects which counter register is used. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11218
11219
/**
 * @opcode 0xe2
 *
 * LOOP Jb: decrement the counter register (CX/ECX/RCX per the effective
 * address size) and take the short branch while the result is non-zero.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override. How can that be restarted? See
     * weird pseudo code in intel manual. */

    /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
     * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
     * the loop causes guest crashes, but when logging it's nice to skip a few million
     * lines of useless output. */
#if defined(LOG_ENABLED)
    /* LOOP $-2 spin detected (branch target is the instruction itself): zero the
       counter and fall out of the loop in one go instead of iterating it down. */
    if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0,0);
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0,0);
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0,0);
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
#endif

    /* Normal path: the address size prefix selects the counter register. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);

            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11306
11307
/**
 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ Jb: take the short branch when the counter register
 * (CX/ECX/RCX per the effective address size) is zero. Does not modify
 * the counter.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* Note the inverted test: non-zero falls through, zero branches. */
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11353
11354
/** Opcode 0xe4.
 * IN AL,Ib - byte input from an immediate port; deferred to the C
 * implementation (privilege/IOPB checking lives there). */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
11363
11364
/** Opcode 0xe5.
 * IN eAX,Ib - word/dword input (per effective operand size) from an
 * immediate port; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                   0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
11374
11375
/** Opcode 0xe6.
 * OUT Ib,AL - byte output to an immediate port; deferred to the C
 * implementation. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
11384
11385
/** Opcode 0xe7.
 * OUT Ib,eAX - word/dword output (per effective operand size) to an
 * immediate port; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                   0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
11395
11396
/**
 * @opcode 0xe8
 *
 * CALL rel16/rel32: near relative call. The immediate is fetched per the
 * effective operand size and sign-extended before being handed to the C
 * implementation (which pushes the return address and updates RIP).
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            /* Cast reinterprets the immediate as a signed displacement. */
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* 64-bit mode still encodes a 32-bit immediate; sign-extend it to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11427
11428
/**
 * @opcode 0xe9
 *
 * JMP rel16/rel32: near relative jump. In 64-bit mode the displacement is
 * still a 32-bit immediate, hence the shared 32/64-bit case below.
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
            IEM_MC_END();
            break;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
            IEM_MC_END();
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11460
11461
/**
 * @opcode 0xea
 *
 * JMP ptr16:16/ptr16:32 - direct far jump. Invalid in 64-bit mode
 * (IEMOP_HLP_NO_64BIT). The offset is decoded per the effective operand
 * size, followed by the 16-bit selector.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
11480
11481
/**
 * @opcode 0xeb
 *
 * JMP rel8 - short relative jump, unconditional.
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    IEM_MC_END();
}
11496
11497
/** Opcode 0xec.
 * IN AL,DX - byte input from the port in DX; deferred to the C
 * implementation. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}
11505
11506
/** Opcode 0xed.
 * IN eAX,DX - word/dword input (per effective operand size) from the port
 * in DX; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                   pVCpu->iem.s.enmEffAddrMode);
}
11515
11516
/** Opcode 0xee.
 * OUT DX,AL - byte output to the port in DX; deferred to the C
 * implementation. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
11524
11525
/** Opcode 0xef.
 * OUT DX,eAX - word/dword output (per effective operand size) to the port
 * in DX; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                   pVCpu->iem.s.enmEffAddrMode);
}
11534
11535
/**
 * @opcode 0xf0
 *
 * LOCK prefix: records IEM_OP_PRF_LOCK (unless the fDisregardLock config
 * flag suppresses it) and continues decoding the next opcode byte.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    if (!pVCpu->iem.s.fDisregardLock)
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    /* Recurse into the one-byte map for the instruction following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
11548
11549
/**
 * @opcode 0xf1
 *
 * INT1/ICEBP: raised as \#DB through the common software-interrupt C
 * implementation (with IEMINT_INT1 as the source).
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    /** @todo Does not generate \#UD on 286, or so they say...  Was allegedly a
     * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
     * LOADALL memo.  Needs some testing. */
    IEMOP_HLP_MIN_386();
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
}
11563
11564
/**
 * @opcode 0xf2
 *
 * REPNE/REPNZ prefix: records IEM_OP_PRF_REPNZ (clearing any earlier REPZ)
 * and continues decoding the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    /* Recurse into the one-byte map for the instruction following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
11582
11583
/**
 * @opcode 0xf3
 *
 * REPE/REPZ prefix: records IEM_OP_PRF_REPZ (clearing any earlier REPNZ)
 * and continues decoding the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes.  (Was "REPNZ overrides any previous
       REPNZ" - copy/paste slip; this path selects the REPZ slot, index 2.) */
    pVCpu->iem.s.idxPrefix = 2;

    /* Recurse into the one-byte map for the instruction following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
11601
11602
/**
 * @opcode 0xf4
 *
 * HLT: deferred to the C implementation (privilege checks and the actual
 * halt state live there).
 */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_MNEMONIC(hlt, "hlt");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
11612
11613
/**
 * @opcode 0xf5
 *
 * CMC: complement the carry flag; no other flags are touched.
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11626
11627
/**
 * Body of 'inc/dec/not/neg Eb'.
 *
 * Expands the register form, the unlocked memory form and (when a LOCK
 * prefix is present) the locked memory form, dispatching on the ModR/M
 * byte @a a_bRm.  The worker functions take (pu8Dst, pEFlags).
 */
#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        /* register access */ \
        IEMOP_HLP_DONE_DECODING(); \
        IEM_MC_BEGIN(2, 0); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG(uint32_t *, pEFlags, 1); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory access. */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            /* Unlocked: map RW, apply the normal worker, commit. */ \
            IEM_MC_BEGIN(2, 2); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* Locked: identical shape, but calls the locked worker variant. */ \
            IEM_MC_BEGIN(2, 2); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
11686
11687
/**
 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
 *
 * Register forms and the unlocked memory forms for all three operand sizes.
 * NOTE: this macro ends with the locked-memory 'else' branch still open; it
 * must be paired with IEMOP_BODY_UNARY_Ev_LOCKED, which supplies that branch
 * and closes the braces.
 */
#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(2, 0); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(2, 0); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                /* 32-bit register writes zero the upper half of the 64-bit register. */ \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(2, 0); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
11803
/* Locked-memory continuation of IEMOP_BODY_UNARY_Ev: supplies the body of
   the open 'else' branch (LOCK prefix present) using the locked worker
   variants, and closes the braces the Ev macro left open. */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
11863
11864
/**
 * @opmaps grp3_f6
 * @opcode /0
 * @todo also /1
 *
 * TEST Eb,Ib: AND without writeback, only EFLAGS are updated.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The trailing 1 tells the effective address calculation that one
           immediate byte still follows the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Read-only mapping: TEST never writes the operand.
           NOTE(review): the 'commit' below presumably just unmaps for a
           DATA_R access - confirm against the IEM_MC_MEM_COMMIT_AND_UNMAP impl. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11914
11915
/** Opcode 0xf6 /4, /5, /6 and /7.
 * Common worker for the byte-sized MUL/IMUL/DIV/IDIV group-3 forms: the
 * worker @a pfnU8 operates on AX with the Eb operand and returns non-zero
 * to request a \#DE (divide error). */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc == 0 means success; anything else raises #DE. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc == 0 means success; anything else raises #DE. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
11966
11967
/** Opcode 0xf7 /4, /5, /6 and /7.
 *
 * Common worker for the word/dword/qword MUL/IMUL/DIV/IDIV forms.  The
 * size-specific worker is picked from pImpl; each takes xAX and xDX by
 * reference (operand/result pair) and returns non-zero to signal \#DE.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                /* NOTE(review): DONE_DECODING was already invoked before the switch;
                   this per-case repetition looks redundant - confirm it is harmless. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit writes via reference do not implicitly zero the
                       high halves of RAX/RDX, so do it explicitly on success. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* See the register variant: explicitly clear the high halves. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12151
12152
12153/**
12154 * @opmaps grp3_f6
12155 * @opcode /2
12156 */
12157FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
12158{
12159 IEMOP_MNEMONIC(not_Eb, "not Eb");
12160 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
12161}
12162
12163
12164/**
12165 * @opmaps grp3_f6
12166 * @opcode /3
12167 */
12168FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
12169{
12170 IEMOP_MNEMONIC(net_Eb, "neg Eb");
12171 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
12172}
12173
12174
12175/**
12176 * @opcode 0xf6
12177 */
12178FNIEMOP_DEF(iemOp_Grp3_Eb)
12179{
12180 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12181 switch (IEM_GET_MODRM_REG_8(bRm))
12182 {
12183 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
12184 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
12185 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
12186 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
12187 case 4:
12188 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
12189 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12190 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
12191 case 5:
12192 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
12193 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12194 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
12195 case 6:
12196 IEMOP_MNEMONIC(div_Eb, "div Eb");
12197 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
12198 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
12199 case 7:
12200 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
12201 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
12202 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
12203 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12204 }
12205}
12206
12207
/** Opcode 0xf7 /0.
 *
 * TEST r/m16/32/64, imm16/32/32-sign-extended - ANDs the operands, sets
 * EFLAGS, discards the result (hence read-only memory mapping below).
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                /* 64-bit form takes a sign-extended 32-bit immediate. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG(uint16_t,        u16Src,             1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=2: imm16 follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG(uint32_t,        u32Src,             1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=4: imm32 follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG(uint64_t,        u64Src,             1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=4: the 64-bit form still only has an imm32 (sign-extended). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12347
12348
/** Opcode 0xf7 /2.
 *
 * NOT r/m16/32/64; the two body macros emit the regular and the
 * LOCK-prefixed code paths respectively.
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_not_u16,        iemAImpl_not_u32,        iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
12356
12357
/** Opcode 0xf7 /3.
 *
 * NEG r/m16/32/64; the two body macros emit the regular and the
 * LOCK-prefixed code paths respectively.
 */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_neg_u16,        iemAImpl_neg_u32,        iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
12365
12366
12367/**
12368 * @opcode 0xf7
12369 */
12370FNIEMOP_DEF(iemOp_Grp3_Ev)
12371{
12372 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12373 switch (IEM_GET_MODRM_REG_8(bRm))
12374 {
12375 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
12376 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
12377 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
12378 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
12379 case 4:
12380 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
12381 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12382 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
12383 case 5:
12384 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
12385 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12386 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
12387 case 6:
12388 IEMOP_MNEMONIC(div_Ev, "div Ev");
12389 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
12390 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
12391 case 7:
12392 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
12393 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
12394 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
12395 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12396 }
12397}
12398
12399
12400/**
12401 * @opcode 0xf8
12402 */
12403FNIEMOP_DEF(iemOp_clc)
12404{
12405 IEMOP_MNEMONIC(clc, "clc");
12406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12407 IEM_MC_BEGIN(0, 0);
12408 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
12409 IEM_MC_ADVANCE_RIP_AND_FINISH();
12410 IEM_MC_END();
12411}
12412
12413
12414/**
12415 * @opcode 0xf9
12416 */
12417FNIEMOP_DEF(iemOp_stc)
12418{
12419 IEMOP_MNEMONIC(stc, "stc");
12420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12421 IEM_MC_BEGIN(0, 0);
12422 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
12423 IEM_MC_ADVANCE_RIP_AND_FINISH();
12424 IEM_MC_END();
12425}
12426
12427
12428/**
12429 * @opcode 0xfa
12430 */
12431FNIEMOP_DEF(iemOp_cli)
12432{
12433 IEMOP_MNEMONIC(cli, "cli");
12434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12435 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
12436}
12437
12438
/**
 * @opcode 0xfb
 *
 * STI - needs IOPL/VME and interrupt-shadow handling, deferred to a C
 * implementation.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
12445
12446
12447/**
12448 * @opcode 0xfc
12449 */
12450FNIEMOP_DEF(iemOp_cld)
12451{
12452 IEMOP_MNEMONIC(cld, "cld");
12453 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12454 IEM_MC_BEGIN(0, 0);
12455 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
12456 IEM_MC_ADVANCE_RIP_AND_FINISH();
12457 IEM_MC_END();
12458}
12459
12460
12461/**
12462 * @opcode 0xfd
12463 */
12464FNIEMOP_DEF(iemOp_std)
12465{
12466 IEMOP_MNEMONIC(std, "std");
12467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12468 IEM_MC_BEGIN(0, 0);
12469 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
12470 IEM_MC_ADVANCE_RIP_AND_FINISH();
12471 IEM_MC_END();
12472}
12473
12474
12475/**
12476 * @opmaps grp4
12477 * @opcode /0
12478 */
12479FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
12480{
12481 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
12482 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
12483}
12484
12485
12486/**
12487 * @opmaps grp4
12488 * @opcode /1
12489 */
12490FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
12491{
12492 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
12493 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
12494}
12495
12496
12497/**
12498 * @opcode 0xfe
12499 */
12500FNIEMOP_DEF(iemOp_Grp4)
12501{
12502 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12503 switch (IEM_GET_MODRM_REG_8(bRm))
12504 {
12505 case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
12506 case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
12507 default:
12508 /** @todo is the eff-addr decoded? */
12509 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
12510 return IEMOP_RAISE_INVALID_OPCODE();
12511 }
12512}
12513
/** Opcode 0xff /0.
 *
 * INC r/m16/32/64; the two body macros emit the regular and the
 * LOCK-prefixed code paths respectively.
 */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_inc_u16,        iemAImpl_inc_u32,        iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
12521
12522
/** Opcode 0xff /1.
 *
 * DEC r/m16/32/64; the two body macros emit the regular and the
 * LOCK-prefixed code paths respectively.
 */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_dec_u16,        iemAImpl_dec_u32,        iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
12530
12531
12532/**
12533 * Opcode 0xff /2.
12534 * @param bRm The RM byte.
12535 */
12536FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
12537{
12538 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
12539 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12540
12541 if (IEM_IS_MODRM_REG_MODE(bRm))
12542 {
12543 /* The new RIP is taken from a register. */
12544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12545 switch (pVCpu->iem.s.enmEffOpSize)
12546 {
12547 case IEMMODE_16BIT:
12548 IEM_MC_BEGIN(1, 0);
12549 IEM_MC_ARG(uint16_t, u16Target, 0);
12550 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
12551 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
12552 IEM_MC_END();
12553 break;
12554
12555 case IEMMODE_32BIT:
12556 IEM_MC_BEGIN(1, 0);
12557 IEM_MC_ARG(uint32_t, u32Target, 0);
12558 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
12559 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
12560 IEM_MC_END();
12561 break;
12562
12563 case IEMMODE_64BIT:
12564 IEM_MC_BEGIN(1, 0);
12565 IEM_MC_ARG(uint64_t, u64Target, 0);
12566 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
12567 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
12568 IEM_MC_END();
12569 break;
12570
12571 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12572 }
12573 }
12574 else
12575 {
12576 /* The new RIP is taken from a register. */
12577 switch (pVCpu->iem.s.enmEffOpSize)
12578 {
12579 case IEMMODE_16BIT:
12580 IEM_MC_BEGIN(1, 1);
12581 IEM_MC_ARG(uint16_t, u16Target, 0);
12582 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12583 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12585 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12586 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
12587 IEM_MC_END();
12588 break;
12589
12590 case IEMMODE_32BIT:
12591 IEM_MC_BEGIN(1, 1);
12592 IEM_MC_ARG(uint32_t, u32Target, 0);
12593 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12594 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12596 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12597 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
12598 IEM_MC_END();
12599 break;
12600
12601 case IEMMODE_64BIT:
12602 IEM_MC_BEGIN(1, 1);
12603 IEM_MC_ARG(uint64_t, u64Target, 0);
12604 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12605 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12607 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12608 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
12609 IEM_MC_END();
12610 break;
12611
12612 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12613 }
12614 }
12615}
12616
/**
 * Common worker for far CALL/JMP via a memory far pointer (grp5 /3 and /5).
 *
 * Fetches a sel:off far pointer (offset first, 16-bit selector at the
 * trailing displacement) and defers the branch to the given C
 * implementation.  Register operands are invalid and raise \#UD.
 */
FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, PFNIEMCIMPLFARBRANCH, pfnCImpl)
{
    /* Registers? How?? */
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(bRm)))
    { /* likely */ }
    else
        return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */

    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */
    /** @todo what does VIA do? */
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu))
    { /* likely */ }
    else
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT;

    /* Far pointer loaded from memory. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t,        u16Sel,                         0);
            IEM_MC_ARG(uint16_t,        offSeg,                         1);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_16BIT,    2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t,        u16Sel,                         0);
            IEM_MC_ARG(uint32_t,        offSeg,                         1);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_32BIT,    2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            /* Only reachable on Intel-like CPUs, see the op-size fixup above. */
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu));
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t,        u16Sel,                         0);
            IEM_MC_ARG(uint64_t,        offSeg,                         1);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_64BIT,    2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12681
12682
12683/**
12684 * Opcode 0xff /3.
12685 * @param bRm The RM byte.
12686 */
12687FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
12688{
12689 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
12690 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
12691}
12692
12693
12694/**
12695 * Opcode 0xff /4.
12696 * @param bRm The RM byte.
12697 */
12698FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
12699{
12700 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
12701 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12702
12703 if (IEM_IS_MODRM_REG_MODE(bRm))
12704 {
12705 /* The new RIP is taken from a register. */
12706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12707 switch (pVCpu->iem.s.enmEffOpSize)
12708 {
12709 case IEMMODE_16BIT:
12710 IEM_MC_BEGIN(0, 1);
12711 IEM_MC_LOCAL(uint16_t, u16Target);
12712 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
12713 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
12714 IEM_MC_END();
12715 break;
12716
12717 case IEMMODE_32BIT:
12718 IEM_MC_BEGIN(0, 1);
12719 IEM_MC_LOCAL(uint32_t, u32Target);
12720 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
12721 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
12722 IEM_MC_END();
12723 break;
12724
12725 case IEMMODE_64BIT:
12726 IEM_MC_BEGIN(0, 1);
12727 IEM_MC_LOCAL(uint64_t, u64Target);
12728 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
12729 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
12730 IEM_MC_END();
12731 break;
12732
12733 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12734 }
12735 }
12736 else
12737 {
12738 /* The new RIP is taken from a memory location. */
12739 switch (pVCpu->iem.s.enmEffOpSize)
12740 {
12741 case IEMMODE_16BIT:
12742 IEM_MC_BEGIN(0, 2);
12743 IEM_MC_LOCAL(uint16_t, u16Target);
12744 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12745 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12747 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12748 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
12749 IEM_MC_END();
12750 break;
12751
12752 case IEMMODE_32BIT:
12753 IEM_MC_BEGIN(0, 2);
12754 IEM_MC_LOCAL(uint32_t, u32Target);
12755 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12756 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12758 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12759 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
12760 IEM_MC_END();
12761 break;
12762
12763 case IEMMODE_64BIT:
12764 IEM_MC_BEGIN(0, 2);
12765 IEM_MC_LOCAL(uint64_t, u64Target);
12766 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12767 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12769 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12770 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
12771 IEM_MC_END();
12772 break;
12773
12774 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12775 }
12776 }
12777}
12778
12779
12780/**
12781 * Opcode 0xff /5.
12782 * @param bRm The RM byte.
12783 */
12784FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
12785{
12786 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
12787 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
12788}
12789
12790
12791/**
12792 * Opcode 0xff /6.
12793 * @param bRm The RM byte.
12794 */
12795FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
12796{
12797 IEMOP_MNEMONIC(push_Ev, "push Ev");
12798
12799 /* Registers are handled by a common worker. */
12800 if (IEM_IS_MODRM_REG_MODE(bRm))
12801 return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
12802
12803 /* Memory we do here. */
12804 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12805 switch (pVCpu->iem.s.enmEffOpSize)
12806 {
12807 case IEMMODE_16BIT:
12808 IEM_MC_BEGIN(0, 2);
12809 IEM_MC_LOCAL(uint16_t, u16Src);
12810 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12811 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12813 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12814 IEM_MC_PUSH_U16(u16Src);
12815 IEM_MC_ADVANCE_RIP_AND_FINISH();
12816 IEM_MC_END();
12817 break;
12818
12819 case IEMMODE_32BIT:
12820 IEM_MC_BEGIN(0, 2);
12821 IEM_MC_LOCAL(uint32_t, u32Src);
12822 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12823 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12825 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12826 IEM_MC_PUSH_U32(u32Src);
12827 IEM_MC_ADVANCE_RIP_AND_FINISH();
12828 IEM_MC_END();
12829 break;
12830
12831 case IEMMODE_64BIT:
12832 IEM_MC_BEGIN(0, 2);
12833 IEM_MC_LOCAL(uint64_t, u64Src);
12834 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12835 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12837 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12838 IEM_MC_PUSH_U64(u64Src);
12839 IEM_MC_ADVANCE_RIP_AND_FINISH();
12840 IEM_MC_END();
12841 break;
12842
12843 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12844 }
12845}
12846
12847
12848/**
12849 * @opcode 0xff
12850 */
12851FNIEMOP_DEF(iemOp_Grp5)
12852{
12853 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12854 switch (IEM_GET_MODRM_REG_8(bRm))
12855 {
12856 case 0:
12857 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
12858 case 1:
12859 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
12860 case 2:
12861 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
12862 case 3:
12863 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
12864 case 4:
12865 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
12866 case 5:
12867 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
12868 case 6:
12869 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
12870 case 7:
12871 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
12872 return IEMOP_RAISE_INVALID_OPCODE();
12873 }
12874 AssertFailedReturn(VERR_IEM_IPE_3);
12875}
12876
12877
12878
/**
 * The one-byte opcode dispatch table.
 *
 * Indexed directly by the first opcode byte (0x00..0xff); each entry is the
 * decoder/emulation worker (PFNIEMOP) for that opcode.  Declared extern near
 * the top of this file so earlier code can forward-reference it.
 *
 * Notes on the layout visible here:
 *  - 0x00..0x3f: the eight basic ALU ops (add/or/adc/sbb/and/sub/xor/cmp),
 *    each in the six classic encodings, interleaved with segment push/pop
 *    and prefix/BCD bytes (e.g. 0x26 seg ES, 0x27 daa).
 *  - 0x0f dispatches to the two-byte escape table (iemOp_2byteEscape).
 *  - 0x60..0x63 and 0xc4/0xc5/0x8f have mode-dependent meanings (pusha vs.
 *    mvex, bound vs. evex, les/lds vs. VEX prefixes, pop Ev vs. XOP) —
 *    handled inside the respective workers, not in this table.
 *  - 0x80..0x8f, 0xc0/0xc1, 0xc6/0xc7, 0xd0..0xd3, 0xf6/0xf7, 0xfe/0xff are
 *    ModR/M "group" opcodes whose /reg field selects the real instruction
 *    (iemOp_Grp1..iemOp_Grp5, iemOp_Grp11).
 *  - 0xd8..0xdf are the x87 FPU escape bytes (iemOp_EscF0..iemOp_EscF7).
 *  - Prefix bytes (segment overrides, 0x66/0x67, lock, rep/repne) also live
 *    here and recurse back into the decoder.
 * There are no NULL entries: every byte value has a worker, with undefined
 * encodings raising \#UD inside the worker itself.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex3,  iemOp_lds_Gv_Mp__vex2,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_in_eAX_DX,        iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
12946
12947
12948/** @} */
12949
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette