VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 103212

Last change on this file since 103212 was 103212, checked in by vboxsync, 14 months ago

VMM/IEMAllInst*: Liveness analysis, part 5: Flag input & modification annotations. bugref:10372

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 552.5 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 103212 2024-02-05 22:29:39Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
/**
 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
 * memory/register as the destination.
 *
 * Decodes the ModR/M byte itself.  For the register form the normal worker is
 * called directly on the destination register.  For the memory form the
 * destination byte is mapped read/write and, when a LOCK prefix is active and
 * not disregarded (IEM_F_X86_DISREGARD_LOCK), the atomic mapping and the
 * locked worker are used instead.
 *
 * @param   a_fnNormalU8    Assembly-level worker for the plain encoding.
 * @param   a_fnLockedU8    Assembly-level worker for the LOCK prefixed
 *                          encoding (memory destination only).
 */
#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8, a_fnLockedU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG(uint8_t, u8Src, 1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG(uint8_t, u8Src, 1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bMapInfoDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bMapInfoDst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bMapInfoDst); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
136
/**
 * Body for instructions like TEST &amp; CMP, ++ with a byte memory/registers as
 * operands.
 *
 * Same shape as IEMOP_BODY_BINARY_rm_r8_RW but the memory operand is only
 * read (mapped read-only), so a LOCK prefix is rejected with \#UD instead of
 * taking an atomic path.
 *
 * @param   a_fnNormalU8    Assembly-level worker (reads both operands, only
 *                          writes EFLAGS).
 */
#define IEMOP_BODY_BINARY_rm_r8_RO(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
            IEM_MC_ARG(uint8_t, u8Src, 1); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* LOCK is architecturally invalid for read-only destinations. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
198
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination.
 *
 * Gb, Eb form: the destination is always the ModR/M reg field, so the memory
 * operand (if any) is only fetched and no LOCK prefix is accepted in either
 * branch.
 *
 * @param   a_fnNormalU8    Assembly-level worker for the operation.
 */
#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        IEM_MC_BEGIN(3, 1, 0, 0); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
247
248
/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Switches on the effective operand size for both the register and memory
 * forms.  Note! The 32-bit register case explicitly clears the high half of
 * the 64-bit GPR after the call (x86 zero-extension rule for 32-bit writes).
 *
 * Warning: this macro deliberately ends inside an unterminated else-block
 * (after `else {`); it MUST be followed by IEMOP_BODY_BINARY_rm_rv_LOCKED,
 * which supplies the LOCK-prefixed path and the closing braces.
 *
 * @param   a_fnNormalU16   Worker for the 16-bit operand size.
 * @param   a_fnNormalU32   Worker for the 32-bit operand size.
 * @param   a_fnNormalU64   Worker for the 64-bit operand size.
 */
#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t, u16Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t, u32Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t, u64Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * LOCK-prefixed continuation of IEMOP_BODY_BINARY_rm_rv_RW.
 *
 * Supplies the body of the `else` block that IEMOP_BODY_BINARY_rm_rv_RW left
 * open (atomic memory mapping + locked workers) and closes the remaining
 * braces.  Must always be invoked immediately after the RW macro.
 *
 * @param   a_fnLockedU16   Locked worker for the 16-bit operand size.
 * @param   a_fnLockedU32   Locked worker for the 32-bit operand size.
 * @param   a_fnLockedU64   Locked worker for the 64-bit operand size.
 */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo /* CMP,TEST */); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
467
/**
 * Body for read-only word/dword/qword instructions like TEST and CMP with
 * memory/register as the destination.
 *
 * Same layout as IEMOP_BODY_BINARY_rm_rv_RW, but the memory operand is mapped
 * read-only and a LOCK prefix raises an invalid-lock-prefix exception (the
 * destination is never written, so there is nothing to lock).  This macro is
 * self-contained - no _LOCKED continuation is required.
 *
 * @param   a_fnNormalU16   Worker for the 16-bit operand size.
 * @param   a_fnNormalU32   Worker for the 32-bit operand size.
 * @param   a_fnNormalU64   Worker for the 64-bit operand size.
 */
#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t, u16Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t, u32Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t, u64Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* LOCK is architecturally invalid for read-only destinations. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
617
618
/**
 * Body for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * Fetches the Ib immediate and applies the worker to AL.  No ModR/M byte and
 * no LOCK prefix.  Note! Deliberately ends without a semicolon/`(void)0` so
 * the invoking opcode function supplies the terminating `;`.
 *
 * @param   a_fnNormalU8    Assembly-level worker for the operation.
 */
#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    \
    IEM_MC_BEGIN(3, 0, 0, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
    IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    \
    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
    \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
638
/**
 * Body for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * The 64-bit case sign-extends a 32-bit immediate (Iz) per the x86 encoding.
 *
 * @param   a_fnNormalU16       Worker for the 16-bit operand size.
 * @param   a_fnNormalU32       Worker for the 32-bit operand size.
 * @param   a_fnNormalU64       Worker for the 64-bit operand size.
 * @param   a_fModifiesDstReg   Non-zero when the worker writes rAX; in 32-bit
 *                              mode this also clears the high half of RAX
 *                              (zero-extension).  Pass 0 for CMP/TEST style
 *                              instructions that only update EFLAGS.
 */
#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            \
            IEM_MC_BEGIN(3, 0, 0, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
            IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            \
            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
            \
            /* NOTE(review): no break after IEM_MC_END() in these braced cases, \
               unlike the other operand-size switches in this file - presumably \
               IEM_MC_END() ends control flow here; confirm against its def. */ \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
            IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            \
            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
            \
            if (a_fModifiesDstReg) \
                IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
            IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            \
            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
705
706
707
708/* Instruction specification format - work in progress: */
709
/**
 * @opcode 0x00
 * @opmnemonic add
 * @op1 rm:Eb
 * @op2 reg:Gb
 * @opmaps one
 * @openc ModR/M
 * @opflclass arithmetic
 * @ophints harmless ignores_op_sizes
 * @opstats add_Eb_Gb
 * @opgroup og_gen_arith_bin
 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    /* ADD Eb, Gb: byte add with register/memory destination; LOCK allowed. */
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_add_u8, iemAImpl_add_u8_locked);
}
731
732
/**
 * @opcode 0x01
 * @opgroup og_gen_arith_bin
 * @opflclass arithmetic
 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    /* ADD Ev, Gv: the _RW body ends mid-block; _LOCKED completes it. */
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
748
749
/**
 * @opcode 0x02
 * @opgroup og_gen_arith_bin
 * @opflclass arithmetic
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* ADD Gb, Eb: register destination, so no LOCK form. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
}
761
762
/**
 * @opcode 0x03
 * @opgroup og_gen_arith_bin
 * @opflclass arithmetic
 * @opcopytests iemOp_add_Ev_Gv
 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    /* ADD Gv, Ev: register destination (IEMOP_BODY_BINARY_rv_rm defined elsewhere). */
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1, 0);
}
774
775
/**
 * @opcode 0x04
 * @opgroup og_gen_arith_bin
 * @opflclass arithmetic
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* ADD AL, Ib: fixed AL destination with byte immediate. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
}
787
788
/**
 * @opcode 0x05
 * @opgroup og_gen_arith_bin
 * @opflclass arithmetic
 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* ADD rAX, Iz: modifies the destination register (final arg = 1). */
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
803
804
/**
 * @opcode 0x06
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* PUSH ES - not valid in 64-bit mode (DISOPTYPE_X86_INVALID_64). */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
815
816
/**
 * @opcode 0x07
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* POP ES - not valid in 64-bit mode (DISOPTYPE_X86_INVALID_64). */
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Defer to the C implementation, flagging a possible mode change and
       listing the guest registers written: rSP plus the visible and hidden
       parts (sel/base/limit/attribs) of ES. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES),
                                iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
834
835
/**
 * @opcode 0x08
 * @opgroup og_gen_arith_bin
 * @opflclass logical
 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    /* OR Eb, Gb: byte OR with register/memory destination; AF is undefined. */
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_or_u8, iemAImpl_or_u8_locked);
}
851
852
/**
 * @opcode 0x09
 * @opgroup og_gen_arith_bin
 * @opflclass logical
 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @note AF is documented as undefined, but both modern AMD and Intel CPUs clears it.
 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    /* OR Ev, Gv: the _RW body ends mid-block; _LOCKED completes it. */
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
872
873
/**
 * @opcode 0x0a
 * @opgroup og_gen_arith_bin
 * @opflclass logical
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    /* OR Gb, Eb: register destination; AF is undefined. */
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
}
886
887
/**
 * @opcode 0x0b
 * @opgroup og_gen_arith_bin
 * @opflclass logical
 * @opcopytests iemOp_or_Ev_Gv
 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    /* OR Gv, Ev: register destination (IEMOP_BODY_BINARY_rv_rm defined elsewhere). */
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1, 0);
}
900
901
/**
 * @opcode 0x0c
 * @opgroup og_gen_arith_bin
 * @opflclass logical
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    /* OR AL, Ib: fixed AL destination with byte immediate; AF is undefined. */
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
}
914
915
/**
 * @opcode 0x0d
 * @opgroup og_gen_arith_bin
 * @opflclass logical
 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX, Iz: modifies the destination register (final arg = 1). */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
934
935
936/**
937 * @opcode 0x0e
938 * @opgroup og_stack_sreg
939 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS - invalid in 64-bit mode (DISOPTYPE_X86_INVALID_64); shares the
       common segment-register push worker. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
946
947
948/**
949 * @opcode 0x0f
950 * @opmnemonic EscTwo0f
951 * @openc two0f
952 * @opdisenum OP_2B_ESC
953 * @ophints harmless
954 * @opgroup og_escapes
955 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        /* Two-byte map has 4 entries per opcode, indexed by the last prefix. */
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_CS),
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
993
994/**
995 * @opcode 0x10
996 * @opgroup og_gen_arith_bin
997 * @opflclass arithmetic_carry
998 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
999 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1000 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1001 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1002 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1003 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* Eb := Eb + Gb + CF.  Memory destination possible, hence the LOCK hint
       and the locked worker variant. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_adc_u8, iemAImpl_adc_u8_locked);
}
1009
1010
1011/**
1012 * @opcode 0x11
1013 * @opgroup og_gen_arith_bin
1014 * @opflclass arithmetic_carry
1015 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1016 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1017 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1018 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1019 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1020 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* Ev := Ev + Gv + CF; locked variants for a LOCK-prefixed memory destination. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
1027
1028
1029/**
1030 * @opcode 0x12
1031 * @opgroup og_gen_arith_bin
1032 * @opflclass arithmetic_carry
1033 * @opcopytests iemOp_adc_Eb_Gb
1034 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* Gb := Gb + Eb + CF.  Register destination, so no LOCK hint. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
1040
1041
1042/**
1043 * @opcode 0x13
1044 * @opgroup og_gen_arith_bin
1045 * @opflclass arithmetic_carry
1046 * @opcopytests iemOp_adc_Ev_Gv
1047 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* Gv := Gv + Ev + CF (16/32/64-bit operand size). */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1, 0);
}
1053
1054
1055/**
1056 * @opcode 0x14
1057 * @opgroup og_gen_arith_bin
1058 * @opflclass arithmetic_carry
1059 * @opcopytests iemOp_adc_Eb_Gb
1060 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* AL := AL + imm8 + CF. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
1066
1067
1068/**
1069 * @opcode 0x15
1070 * @opgroup og_gen_arith_bin
1071 * @opflclass arithmetic_carry
1072 * @opcopytests iemOp_adc_Ev_Gv
1073 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* rAX := rAX + Iz + CF (immediate sized by effective operand size). */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1079
1080
1081/**
1082 * @opcode 0x16
1083 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS - invalid in 64-bit mode; shares the common sreg push worker. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1090
1091
1092/**
1093 * @opcode 0x17
1094 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS - invalid in 64-bit mode.  Defers to the C implementation; the
       IEM_CIMPL_F_INHIBIT_SHADOW flag matches DISOPTYPE_INHIBIT_IRQS above
       (interrupt shadow following POP SS).  Modifies xSP plus the whole
       hidden SS register (sel/base/limit/attribs). */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_INHIBIT_SHADOW,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_SS),
                                iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1108
1109
1110/**
1111 * @opcode 0x18
1112 * @opgroup og_gen_arith_bin
1113 * @opflclass arithmetic_carry
1114 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* Eb := Eb - Gb - CF; locked worker for LOCK-prefixed memory destination. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_sbb_u8, iemAImpl_sbb_u8_locked);
}
1120
1121
1122/**
1123 * @opcode 0x19
1124 * @opgroup og_gen_arith_bin
1125 * @opflclass arithmetic_carry
1126 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* Ev := Ev - Gv - CF; locked variants for a LOCK-prefixed memory destination. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
1133
1134
1135/**
1136 * @opcode 0x1a
1137 * @opgroup og_gen_arith_bin
1138 * @opflclass arithmetic_carry
1139 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* Gb := Gb - Eb - CF.  Register destination, so no LOCK hint. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
1145
1146
1147/**
1148 * @opcode 0x1b
1149 * @opgroup og_gen_arith_bin
1150 * @opflclass arithmetic_carry
1151 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* Gv := Gv - Ev - CF (16/32/64-bit operand size). */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1, 0);
}
1157
1158
1159/**
1160 * @opcode 0x1c
1161 * @opgroup og_gen_arith_bin
1162 * @opflclass arithmetic_carry
1163 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* AL := AL - imm8 - CF. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
1169
1170
1171/**
1172 * @opcode 0x1d
1173 * @opgroup og_gen_arith_bin
1174 * @opflclass arithmetic_carry
1175 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* rAX := rAX - Iz - CF (immediate sized by effective operand size). */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1181
1182
1183/**
1184 * @opcode 0x1e
1185 * @opgroup og_stack_sreg
1186 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS - invalid in 64-bit mode; shares the common sreg push worker. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1193
1194
1195/**
1196 * @opcode 0x1f
1197 * @opgroup og_stack_sreg
1198 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS - invalid in 64-bit mode.  Defers to the C implementation;
       modifies xSP plus the whole hidden DS register (sel/base/limit/attribs).
       Unlike POP SS there is no interrupt-shadow flag here. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS),
                                iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1212
1213
1214/**
1215 * @opcode 0x20
1216 * @opgroup og_gen_arith_bin
1217 * @opflclass logical
1218 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* Eb := Eb & Gb; locked worker for LOCK-prefixed memory destination. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AND leaves AF undefined. */
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_and_u8, iemAImpl_and_u8_locked);
}
1225
1226
1227/**
1228 * @opcode 0x21
1229 * @opgroup og_gen_arith_bin
1230 * @opflclass logical
1231 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* Ev := Ev & Gv; locked variants for a LOCK-prefixed memory destination. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AND leaves AF undefined. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1239
1240
1241/**
1242 * @opcode 0x22
1243 * @opgroup og_gen_arith_bin
1244 * @opflclass logical
1245 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* Gb := Gb & Eb.  Register destination, so no LOCK hint. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AND leaves AF undefined. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1252
1253
1254/**
1255 * @opcode 0x23
1256 * @opgroup og_gen_arith_bin
1257 * @opflclass logical
1258 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* Gv := Gv & Ev (16/32/64-bit operand size). */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AND leaves AF undefined. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1, 0);
}
1265
1266
1267/**
1268 * @opcode 0x24
1269 * @opgroup og_gen_arith_bin
1270 * @opflclass logical
1271 */
1272FNIEMOP_DEF(iemOp_and_Al_Ib)
1273{
1274 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1275 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1276 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1277}
1278
1279
1280/**
1281 * @opcode 0x25
1282 * @opgroup og_gen_arith_bin
1283 * @opflclass logical
1284 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* rAX := rAX & Iz (immediate sized by effective operand size). */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AND leaves AF undefined. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1291
1292
1293/**
1294 * @opcode 0x26
1295 * @opmnemonic SEG
1296 * @op1 ES
1297 * @opgroup og_prefix
1298 * @openc prefix
1299 * @opdisenum OP_SEG
1300 * @ophints harmless
1301 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record it, set the effective segment, and
       continue decoding with the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg   = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1311
1312
1313/**
1314 * @opcode 0x27
1315 * @opfltest af,cf
1316 * @opflmodify cf,pf,af,zf,sf,of
1317 * @opflundef of
1318 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA - decimal adjust AL after addition; invalid in 64-bit mode.
       Deferred to the C implementation, which only touches xAX and status flags. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after DAA. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_daa);
}
1327
1328
1329/**
1330 * @opcode 0x28
1331 * @opgroup og_gen_arith_bin
1332 * @opflclass arithmetic
1333 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* Eb := Eb - Gb; locked worker for LOCK-prefixed memory destination. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_sub_u8, iemAImpl_sub_u8_locked);
}
1339
1340
1341/**
1342 * @opcode 0x29
1343 * @opgroup og_gen_arith_bin
1344 * @opflclass arithmetic
1345 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* Ev := Ev - Gv; locked variants for a LOCK-prefixed memory destination. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1352
1353
1354/**
1355 * @opcode 0x2a
1356 * @opgroup og_gen_arith_bin
1357 * @opflclass arithmetic
1358 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* Gb := Gb - Eb.  Register destination, so no LOCK hint. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1364
1365
1366/**
1367 * @opcode 0x2b
1368 * @opgroup og_gen_arith_bin
1369 * @opflclass arithmetic
1370 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* Gv := Gv - Ev (16/32/64-bit operand size). */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1, 0);
}
1376
1377
1378/**
1379 * @opcode 0x2c
1380 * @opgroup og_gen_arith_bin
1381 * @opflclass arithmetic
1382 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* AL := AL - imm8. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1388
1389
1390/**
1391 * @opcode 0x2d
1392 * @opgroup og_gen_arith_bin
1393 * @opflclass arithmetic
1394 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* rAX := rAX - Iz (immediate sized by effective operand size). */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1400
1401
1402/**
1403 * @opcode 0x2e
1404 * @opmnemonic SEG
1405 * @op1 CS
1406 * @opgroup og_prefix
1407 * @openc prefix
1408 * @opdisenum OP_SEG
1409 * @ophints harmless
1410 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record it, set the effective segment, and
       continue decoding with the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg   = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1420
1421
1422/**
1423 * @opcode 0x2f
1424 * @opfltest af,cf
1425 * @opflmodify cf,pf,af,zf,sf,of
1426 * @opflundef of
1427 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS - decimal adjust AL after subtraction; invalid in 64-bit mode.
       Deferred to the C implementation, which only touches xAX and status flags. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after DAS. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_das);
}
1436
1437
1438/**
1439 * @opcode 0x30
1440 * @opgroup og_gen_arith_bin
1441 * @opflclass logical
1442 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* Eb := Eb ^ Gb; locked worker for LOCK-prefixed memory destination. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* XOR leaves AF undefined. */
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_xor_u8, iemAImpl_xor_u8_locked);
}
1449
1450
1451/**
1452 * @opcode 0x31
1453 * @opgroup og_gen_arith_bin
1454 * @opflclass logical
1455 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* Ev := Ev ^ Gv; locked variants for a LOCK-prefixed memory destination. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* XOR leaves AF undefined. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1463
1464
1465/**
1466 * @opcode 0x32
1467 * @opgroup og_gen_arith_bin
1468 * @opflclass logical
1469 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* Gb := Gb ^ Eb.  Register destination, so no LOCK hint. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* XOR leaves AF undefined. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1476
1477
1478/**
1479 * @opcode 0x33
1480 * @opgroup og_gen_arith_bin
1481 * @opflclass logical
1482 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* Gv := Gv ^ Ev (16/32/64-bit operand size). */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* XOR leaves AF undefined. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1, 0);
}
1489
1490
1491/**
1492 * @opcode 0x34
1493 * @opgroup og_gen_arith_bin
1494 * @opflclass logical
1495 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* AL := AL ^ imm8. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* XOR leaves AF undefined. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1502
1503
1504/**
1505 * @opcode 0x35
1506 * @opgroup og_gen_arith_bin
1507 * @opflclass logical
1508 */
1509FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1510{
1511 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1512 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1513 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1514}
1515
1516
1517/**
1518 * @opcode 0x36
1519 * @opmnemonic SEG
1520 * @op1 SS
1521 * @opgroup og_prefix
1522 * @openc prefix
1523 * @opdisenum OP_SEG
1524 * @ophints harmless
1525 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record it, set the effective segment, and
       continue decoding with the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg   = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1535
1536
1537/**
1538 * @opcode 0x37
1539 * @opfltest af
1540 * @opflmodify cf,pf,af,zf,sf,of
1541 * @opflundef pf,zf,sf,of
1542 * @opgroup og_gen_arith_dec
1543 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1544 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1545 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1546 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1547 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1548 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1549 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1550 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1551 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1552 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1553 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1554 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1555 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1556 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1557 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1558 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1559 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1560 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1561 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1562 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1563 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1564 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1565 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1566 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1567 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1568 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1569 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1570 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1571 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1572 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1573 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1574 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA - ASCII adjust AL after addition; invalid in 64-bit mode.
       Deferred to the C implementation, which only touches xAX and status
       flags; see the @optest table above for Intel vs AMD flag differences. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* only OF is checked as undefined here; PF/ZF/SF are verified per the optests */

    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aaa);
}
1584
1585
1586/**
1587 * @opcode 0x38
1588 * @opflclass arithmetic
1589 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP Eb,Gb - flags only; read-only body, so no locked variant. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_cmp_u8);
}
1595
1596
1597/**
1598 * @opcode 0x39
1599 * @opflclass arithmetic
1600 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP Ev,Gv - flags only; read-only body, so no locked variant. */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
1606
1607
1608/**
1609 * @opcode 0x3a
1610 * @opflclass arithmetic
1611 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP Gb,Eb - flags only. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1617
1618
1619/**
1620 * @opcode 0x3b
1621 * @opflclass arithmetic
1622 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP Gv,Ev - flags only; note the 0 "modifies destination" argument,
       unlike the arithmetic Gv,Ev ops which pass 1. */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0, 0);
}
1628
1629
1630/**
1631 * @opcode 0x3c
1632 * @opflclass arithmetic
1633 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL,imm8 - flags only. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1639
1640
1641/**
1642 * @opcode 0x3d
1643 * @opflclass arithmetic
1644 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX,Iz - flags only (0 = destination not modified). */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1650
1651
1652/**
1653 * @opcode 0x3e
1654 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record it, set the effective segment, and
       continue decoding with the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg   = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1664
1665
1666/**
1667 * @opcode 0x3f
1668 * @opfltest af
1669 * @opflmodify cf,pf,af,zf,sf,of
1670 * @opflundef pf,zf,sf,of
1671 * @opgroup og_gen_arith_dec
1672 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1673 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1674 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1675 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1676 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1677 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1678 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1679 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1680 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1681 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1682 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1683 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1684 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1685 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1686 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1687 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1688 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1689 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1690 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1691 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1692 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1693 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1694 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1695 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1696 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1697 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1698 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1699 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1700 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1701 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1702 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1703 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1704 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1705 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1706 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1707 */
1708FNIEMOP_DEF(iemOp_aas)
1709{
1710 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1711 IEMOP_HLP_NO_64BIT();
1712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1713 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1714
1715 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aas);
1716}
1717
1718
1719/**
1720 * Common 'inc/dec register' helper.
1721 *
1722 * Not for 64-bit code, only for what became the rex prefixes.
1723 */
#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        /* 16-bit operand size: apply the worker to the 16-bit register. */ \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *,      pu16Dst, 0); \
            IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
            IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        /* 32-bit operand size (386+): note the explicit high-dword clear, \
           kept for consistency with 64-bit register semantics even though \
           this body is never used in 64-bit mode. */ \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *,      pu32Dst, 0); \
            IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
            IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
            IEM_MC_CLEAR_HIGH_GREG_U64(a_iReg); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
1754
1755/**
1756 * @opcode 0x40
1757 * @opflclass incdec
1758 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Plain REX (no R/X/B/W bits): just note it and decode the next byte. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1776
1777
1778/**
1779 * @opcode 0x41
1780 * @opflclass incdec
1781 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB     = 1 << 3; /* REX.B supplies bit 3 of the rm/base register number. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1800
1801
1802/**
1803 * @opcode 0x42
1804 * @opflclass incdec
1805 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X supplies bit 3 of the SIB index register number. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1824
1825
1826
1827/**
1828 * @opcode 0x43
1829 * @opflclass incdec
1830 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.BX: both the rm/base (B) and SIB index (X) extensions set. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1850
1851
1852/**
1853 * @opcode 0x44
1854 * @opflclass incdec
1855 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R supplies bit 3 of the ModRM reg register number. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1874
1875
1876/**
1877 * @opcode 0x45
1878 * @opflclass incdec
1879 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RB: reg (R) and rm/base (B) extensions set. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1899
1900
1901/**
1902 * @opcode 0x46
1903 * @opflclass incdec
1904 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RX: reg (R) and SIB index (X) extensions set. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
1924
1925
1926/**
1927 * @opcode 0x47
1928 * @opflclass incdec
1929 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RBX: reg (R), rm/base (B) and SIB index (X) extensions set. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
1950
1951
1952/**
1953 * @opcode 0x48
1954 * @opflclass incdec
1955 */
1956FNIEMOP_DEF(iemOp_dec_eAX)
1957{
1958 /*
1959 * This is a REX prefix in 64-bit mode.
1960 */
1961 if (IEM_IS_64BIT_CODE(pVCpu))
1962 {
1963 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
1964 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
1965 iemRecalEffOpSize(pVCpu);
1966
1967 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1968 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1969 }
1970
1971 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
1972 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
1973}
1974
1975
1976/**
1977 * @opcode 0x49
1978 * @opflclass incdec
1979 */
1980FNIEMOP_DEF(iemOp_dec_eCX)
1981{
1982 /*
1983 * This is a REX prefix in 64-bit mode.
1984 */
1985 if (IEM_IS_64BIT_CODE(pVCpu))
1986 {
1987 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
1988 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
1989 pVCpu->iem.s.uRexB = 1 << 3;
1990 iemRecalEffOpSize(pVCpu);
1991
1992 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1993 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1994 }
1995
1996 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
1997 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
1998}
1999
2000
2001/**
2002 * @opcode 0x4a
2003 * @opflclass incdec
2004 */
2005FNIEMOP_DEF(iemOp_dec_eDX)
2006{
2007 /*
2008 * This is a REX prefix in 64-bit mode.
2009 */
2010 if (IEM_IS_64BIT_CODE(pVCpu))
2011 {
2012 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
2013 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2014 pVCpu->iem.s.uRexIndex = 1 << 3;
2015 iemRecalEffOpSize(pVCpu);
2016
2017 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2018 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2019 }
2020
2021 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
2022 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
2023}
2024
2025
2026/**
2027 * @opcode 0x4b
2028 * @opflclass incdec
2029 */
2030FNIEMOP_DEF(iemOp_dec_eBX)
2031{
2032 /*
2033 * This is a REX prefix in 64-bit mode.
2034 */
2035 if (IEM_IS_64BIT_CODE(pVCpu))
2036 {
2037 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
2038 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2039 pVCpu->iem.s.uRexB = 1 << 3;
2040 pVCpu->iem.s.uRexIndex = 1 << 3;
2041 iemRecalEffOpSize(pVCpu);
2042
2043 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2044 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2045 }
2046
2047 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
2048 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
2049}
2050
2051
2052/**
2053 * @opcode 0x4c
2054 * @opflclass incdec
2055 */
2056FNIEMOP_DEF(iemOp_dec_eSP)
2057{
2058 /*
2059 * This is a REX prefix in 64-bit mode.
2060 */
2061 if (IEM_IS_64BIT_CODE(pVCpu))
2062 {
2063 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
2064 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
2065 pVCpu->iem.s.uRexReg = 1 << 3;
2066 iemRecalEffOpSize(pVCpu);
2067
2068 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2069 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2070 }
2071
2072 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
2073 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
2074}
2075
2076
2077/**
2078 * @opcode 0x4d
2079 * @opflclass incdec
2080 */
2081FNIEMOP_DEF(iemOp_dec_eBP)
2082{
2083 /*
2084 * This is a REX prefix in 64-bit mode.
2085 */
2086 if (IEM_IS_64BIT_CODE(pVCpu))
2087 {
2088 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
2089 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
2090 pVCpu->iem.s.uRexReg = 1 << 3;
2091 pVCpu->iem.s.uRexB = 1 << 3;
2092 iemRecalEffOpSize(pVCpu);
2093
2094 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2095 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2096 }
2097
2098 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
2099 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
2100}
2101
2102
2103/**
2104 * @opcode 0x4e
2105 * @opflclass incdec
2106 */
2107FNIEMOP_DEF(iemOp_dec_eSI)
2108{
2109 /*
2110 * This is a REX prefix in 64-bit mode.
2111 */
2112 if (IEM_IS_64BIT_CODE(pVCpu))
2113 {
2114 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
2115 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2116 pVCpu->iem.s.uRexReg = 1 << 3;
2117 pVCpu->iem.s.uRexIndex = 1 << 3;
2118 iemRecalEffOpSize(pVCpu);
2119
2120 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2121 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2122 }
2123
2124 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
2125 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
2126}
2127
2128
2129/**
2130 * @opcode 0x4f
2131 * @opflclass incdec
2132 */
2133FNIEMOP_DEF(iemOp_dec_eDI)
2134{
2135 /*
2136 * This is a REX prefix in 64-bit mode.
2137 */
2138 if (IEM_IS_64BIT_CODE(pVCpu))
2139 {
2140 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
2141 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2142 pVCpu->iem.s.uRexReg = 1 << 3;
2143 pVCpu->iem.s.uRexB = 1 << 3;
2144 pVCpu->iem.s.uRexIndex = 1 << 3;
2145 iemRecalEffOpSize(pVCpu);
2146
2147 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2148 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2149 }
2150
2151 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
2152 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
2153}
2154
2155
2156/**
2157 * Common 'push register' helper.
2158 */
2159FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
2160{
2161 if (IEM_IS_64BIT_CODE(pVCpu))
2162 {
2163 iReg |= pVCpu->iem.s.uRexB;
2164 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2165 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2166 }
2167
2168 switch (pVCpu->iem.s.enmEffOpSize)
2169 {
2170 case IEMMODE_16BIT:
2171 IEM_MC_BEGIN(0, 1, 0, 0);
2172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2173 IEM_MC_LOCAL(uint16_t, u16Value);
2174 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
2175 IEM_MC_PUSH_U16(u16Value);
2176 IEM_MC_ADVANCE_RIP_AND_FINISH();
2177 IEM_MC_END();
2178 break;
2179
2180 case IEMMODE_32BIT:
2181 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2183 IEM_MC_LOCAL(uint32_t, u32Value);
2184 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
2185 IEM_MC_PUSH_U32(u32Value);
2186 IEM_MC_ADVANCE_RIP_AND_FINISH();
2187 IEM_MC_END();
2188 break;
2189
2190 case IEMMODE_64BIT:
2191 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2193 IEM_MC_LOCAL(uint64_t, u64Value);
2194 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
2195 IEM_MC_PUSH_U64(u64Value);
2196 IEM_MC_ADVANCE_RIP_AND_FINISH();
2197 IEM_MC_END();
2198 break;
2199
2200 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2201 }
2202}
2203
2204
2205/**
2206 * @opcode 0x50
2207 */
2208FNIEMOP_DEF(iemOp_push_eAX)
2209{
2210 IEMOP_MNEMONIC(push_rAX, "push rAX");
2211 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
2212}
2213
2214
2215/**
2216 * @opcode 0x51
2217 */
2218FNIEMOP_DEF(iemOp_push_eCX)
2219{
2220 IEMOP_MNEMONIC(push_rCX, "push rCX");
2221 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
2222}
2223
2224
2225/**
2226 * @opcode 0x52
2227 */
2228FNIEMOP_DEF(iemOp_push_eDX)
2229{
2230 IEMOP_MNEMONIC(push_rDX, "push rDX");
2231 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
2232}
2233
2234
2235/**
2236 * @opcode 0x53
2237 */
2238FNIEMOP_DEF(iemOp_push_eBX)
2239{
2240 IEMOP_MNEMONIC(push_rBX, "push rBX");
2241 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
2242}
2243
2244
2245/**
2246 * @opcode 0x54
2247 */
2248FNIEMOP_DEF(iemOp_push_eSP)
2249{
2250 IEMOP_MNEMONIC(push_rSP, "push rSP");
2251 if (IEM_GET_TARGET_CPU(pVCpu) != IEMTARGETCPU_8086)
2252 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
2253
2254 /* 8086 works differently wrt to 'push sp' compared to 80186 and later. */
2255 IEM_MC_BEGIN(0, 1, IEM_MC_F_ONLY_8086, 0);
2256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2257 IEM_MC_LOCAL(uint16_t, u16Value);
2258 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
2259 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
2260 IEM_MC_PUSH_U16(u16Value);
2261 IEM_MC_ADVANCE_RIP_AND_FINISH();
2262 IEM_MC_END();
2263}
2264
2265
2266/**
2267 * @opcode 0x55
2268 */
2269FNIEMOP_DEF(iemOp_push_eBP)
2270{
2271 IEMOP_MNEMONIC(push_rBP, "push rBP");
2272 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
2273}
2274
2275
2276/**
2277 * @opcode 0x56
2278 */
2279FNIEMOP_DEF(iemOp_push_eSI)
2280{
2281 IEMOP_MNEMONIC(push_rSI, "push rSI");
2282 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
2283}
2284
2285
2286/**
2287 * @opcode 0x57
2288 */
2289FNIEMOP_DEF(iemOp_push_eDI)
2290{
2291 IEMOP_MNEMONIC(push_rDI, "push rDI");
2292 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
2293}
2294
2295
2296/**
2297 * Common 'pop register' helper.
2298 */
2299FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
2300{
2301 if (IEM_IS_64BIT_CODE(pVCpu))
2302 {
2303 iReg |= pVCpu->iem.s.uRexB;
2304 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2305 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2306 }
2307
2308 switch (pVCpu->iem.s.enmEffOpSize)
2309 {
2310 case IEMMODE_16BIT:
2311 IEM_MC_BEGIN(0, 0, 0, 0);
2312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2313 IEM_MC_POP_GREG_U16(iReg);
2314 IEM_MC_ADVANCE_RIP_AND_FINISH();
2315 IEM_MC_END();
2316 break;
2317
2318 case IEMMODE_32BIT:
2319 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2321 IEM_MC_POP_GREG_U32(iReg);
2322 IEM_MC_ADVANCE_RIP_AND_FINISH();
2323 IEM_MC_END();
2324 break;
2325
2326 case IEMMODE_64BIT:
2327 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
2328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2329 IEM_MC_POP_GREG_U64(iReg);
2330 IEM_MC_ADVANCE_RIP_AND_FINISH();
2331 IEM_MC_END();
2332 break;
2333
2334 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2335 }
2336}
2337
2338
2339/**
2340 * @opcode 0x58
2341 */
2342FNIEMOP_DEF(iemOp_pop_eAX)
2343{
2344 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
2345 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
2346}
2347
2348
2349/**
2350 * @opcode 0x59
2351 */
2352FNIEMOP_DEF(iemOp_pop_eCX)
2353{
2354 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
2355 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
2356}
2357
2358
2359/**
2360 * @opcode 0x5a
2361 */
2362FNIEMOP_DEF(iemOp_pop_eDX)
2363{
2364 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
2365 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
2366}
2367
2368
2369/**
2370 * @opcode 0x5b
2371 */
2372FNIEMOP_DEF(iemOp_pop_eBX)
2373{
2374 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
2375 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
2376}
2377
2378
2379/**
2380 * @opcode 0x5c
2381 */
2382FNIEMOP_DEF(iemOp_pop_eSP)
2383{
2384 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
2385 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
2386}
2387
2388
2389/**
2390 * @opcode 0x5d
2391 */
2392FNIEMOP_DEF(iemOp_pop_eBP)
2393{
2394 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
2395 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
2396}
2397
2398
2399/**
2400 * @opcode 0x5e
2401 */
2402FNIEMOP_DEF(iemOp_pop_eSI)
2403{
2404 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
2405 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
2406}
2407
2408
2409/**
2410 * @opcode 0x5f
2411 */
2412FNIEMOP_DEF(iemOp_pop_eDI)
2413{
2414 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
2415 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
2416}
2417
2418
2419/**
2420 * @opcode 0x60
2421 */
2422FNIEMOP_DEF(iemOp_pusha)
2423{
2424 IEMOP_MNEMONIC(pusha, "pusha");
2425 IEMOP_HLP_MIN_186();
2426 IEMOP_HLP_NO_64BIT();
2427 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2428 IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_16);
2429 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2430 IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_32);
2431}
2432
2433
2434/**
2435 * @opcode 0x61
2436 */
2437FNIEMOP_DEF(iemOp_popa__mvex)
2438{
2439 if (!IEM_IS_64BIT_CODE(pVCpu))
2440 {
2441 IEMOP_MNEMONIC(popa, "popa");
2442 IEMOP_HLP_MIN_186();
2443 IEMOP_HLP_NO_64BIT();
2444 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2445 IEM_MC_DEFER_TO_CIMPL_0_RET(0,
2446 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
2447 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
2448 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
2449 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
2450 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
2451 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
2452 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
2453 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
2454 iemCImpl_popa_16);
2455 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2456 IEM_MC_DEFER_TO_CIMPL_0_RET(0,
2457 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
2458 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
2459 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
2460 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
2461 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
2462 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
2463 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
2464 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
2465 iemCImpl_popa_32);
2466 }
2467 IEMOP_MNEMONIC(mvex, "mvex");
2468 Log(("mvex prefix is not supported!\n"));
2469 IEMOP_RAISE_INVALID_OPCODE_RET();
2470}
2471
2472
2473/**
2474 * @opcode 0x62
2475 * @opmnemonic bound
2476 * @op1 Gv_RO
2477 * @op2 Ma
2478 * @opmincpu 80186
2479 * @ophints harmless x86_invalid_64
2480 * @optest op1=0 op2=0 ->
2481 * @optest op1=1 op2=0 -> value.xcpt=5
2482 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2483 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2484 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2485 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2486 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2487 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2488 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2489 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2490 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2491 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2492 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2493 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2494 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2495 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2496 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2497 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2498 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2499 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2500 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2501 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2502 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2503 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2504 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2505 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2506 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2507 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2508 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2509 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2510 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2511 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2512 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2513 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2514 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2515 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2516 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2517 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2518 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2519 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2520 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2521 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2522 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2523 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2524 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /** @todo testcase: check that there are two memory accesses involved. Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                /* 16-bit: index in Gw, lower/upper bounds in two words at m16&16. */
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                /* 32-bit: index in Gd, lower/upper bounds in two dwords at m32&32. */
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX prefix decoding: consume the remaining two payload bytes, then
       bail out since EVEX-encoded instructions are not implemented yet. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2612
2613
2614/**
2615 * @opcode 0x63
2616 * @opflmodify zf
2617 * @note non-64-bit modes.
2618 */
2619FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
2620{
2621 IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
2622 IEMOP_HLP_MIN_286();
2623 IEMOP_HLP_NO_REAL_OR_V86_MODE();
2624 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2625
2626 if (IEM_IS_MODRM_REG_MODE(bRm))
2627 {
2628 /* Register */
2629 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
2630 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
2631 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2632 IEM_MC_ARG(uint16_t, u16Src, 1);
2633 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2634
2635 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
2636 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
2637 IEM_MC_REF_EFLAGS(pEFlags);
2638 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
2639
2640 IEM_MC_ADVANCE_RIP_AND_FINISH();
2641 IEM_MC_END();
2642 }
2643 else
2644 {
2645 /* Memory */
2646 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
2647 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2648 IEM_MC_ARG(uint16_t, u16Src, 1);
2649 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
2650 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2651 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
2652
2653 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2654 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
2655 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2656 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
2657 IEM_MC_FETCH_EFLAGS(EFlags);
2658 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
2659
2660 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
2661 IEM_MC_COMMIT_EFLAGS(EFlags);
2662 IEM_MC_ADVANCE_RIP_AND_FINISH();
2663 IEM_MC_END();
2664 }
2665}
2666
2667
2668/**
2669 * @opcode 0x63
2670 *
2671 * @note This is a weird one. It works like a regular move instruction if
2672 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2673 * @todo This definitely needs a testcase to verify the odd cases. */
2674FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
2675{
2676 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */
2677
2678 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
2679 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2680
2681 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2682 {
2683 if (IEM_IS_MODRM_REG_MODE(bRm))
2684 {
2685 /*
2686 * Register to register.
2687 */
2688 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2689 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2690 IEM_MC_LOCAL(uint64_t, u64Value);
2691 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
2692 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2693 IEM_MC_ADVANCE_RIP_AND_FINISH();
2694 IEM_MC_END();
2695 }
2696 else
2697 {
2698 /*
2699 * We're loading a register from memory.
2700 */
2701 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
2702 IEM_MC_LOCAL(uint64_t, u64Value);
2703 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2704 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2705 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2706 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2707 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2708 IEM_MC_ADVANCE_RIP_AND_FINISH();
2709 IEM_MC_END();
2710 }
2711 }
2712 else
2713 AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
2714}
2715
2716
2717/**
2718 * @opcode 0x64
2719 * @opmnemonic segfs
2720 * @opmincpu 80386
2721 * @opgroup og_prefixes
2722 */
2723FNIEMOP_DEF(iemOp_seg_FS)
2724{
2725 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
2726 IEMOP_HLP_MIN_386();
2727
2728 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
2729 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
2730
2731 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2732 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2733}
2734
2735
2736/**
2737 * @opcode 0x65
2738 * @opmnemonic seggs
2739 * @opmincpu 80386
2740 * @opgroup og_prefixes
2741 */
2742FNIEMOP_DEF(iemOp_seg_GS)
2743{
2744 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
2745 IEMOP_HLP_MIN_386();
2746
2747 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
2748 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
2749
2750 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2751 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2752}
2753
2754
2755/**
2756 * @opcode 0x66
2757 * @opmnemonic opsize
2758 * @openc prefix
2759 * @opmincpu 80386
2760 * @ophints harmless
2761 * @opgroup og_prefixes
2762 */
2763FNIEMOP_DEF(iemOp_op_size)
2764{
2765 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
2766 IEMOP_HLP_MIN_386();
2767
2768 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
2769 iemRecalEffOpSize(pVCpu);
2770
2771 /* For the 4 entry opcode tables, the operand prefix doesn't not count
2772 when REPZ or REPNZ are present. */
2773 if (pVCpu->iem.s.idxPrefix == 0)
2774 pVCpu->iem.s.idxPrefix = 1;
2775
2776 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2777 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2778}
2779
2780
2781/**
2782 * @opcode 0x67
2783 * @opmnemonic addrsize
2784 * @openc prefix
2785 * @opmincpu 80386
2786 * @ophints harmless
2787 * @opgroup og_prefixes
2788 */
2789FNIEMOP_DEF(iemOp_addr_size)
2790{
2791 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
2792 IEMOP_HLP_MIN_386();
2793
2794 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
2795 switch (pVCpu->iem.s.enmDefAddrMode)
2796 {
2797 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2798 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
2799 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2800 default: AssertFailed();
2801 }
2802
2803 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2804 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2805}
2806
2807
2808/**
2809 * @opcode 0x68
2810 */
2811FNIEMOP_DEF(iemOp_push_Iz)
2812{
2813 IEMOP_MNEMONIC(push_Iz, "push Iz");
2814 IEMOP_HLP_MIN_186();
2815 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2816 switch (pVCpu->iem.s.enmEffOpSize)
2817 {
2818 case IEMMODE_16BIT:
2819 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
2820 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2822 IEM_MC_LOCAL_CONST(uint16_t, u16Value, u16Imm);
2823 IEM_MC_PUSH_U16(u16Value);
2824 IEM_MC_ADVANCE_RIP_AND_FINISH();
2825 IEM_MC_END();
2826 break;
2827
2828 case IEMMODE_32BIT:
2829 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2830 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2832 IEM_MC_LOCAL_CONST(uint32_t, u32Value, u32Imm);
2833 IEM_MC_PUSH_U32(u32Value);
2834 IEM_MC_ADVANCE_RIP_AND_FINISH();
2835 IEM_MC_END();
2836 break;
2837
2838 case IEMMODE_64BIT:
2839 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2840 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2841 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2842 IEM_MC_LOCAL_CONST(uint64_t, u64Value, u64Imm);
2843 IEM_MC_PUSH_U64(u64Value);
2844 IEM_MC_ADVANCE_RIP_AND_FINISH();
2845 IEM_MC_END();
2846 break;
2847
2848 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2849 }
2850}
2851
2852
2853/**
2854 * @opcode 0x69
2855 * @opflclass multiply
2856 */
2857FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
2858{
2859 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
2860 IEMOP_HLP_MIN_186();
2861 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2862 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2863
2864 switch (pVCpu->iem.s.enmEffOpSize)
2865 {
2866 case IEMMODE_16BIT:
2867 {
2868 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
2869 if (IEM_IS_MODRM_REG_MODE(bRm))
2870 {
2871 /* register operand */
2872 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2873 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
2874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2875 IEM_MC_LOCAL(uint16_t, u16Tmp);
2876 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2877 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
2878 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
2879 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2880 IEM_MC_REF_EFLAGS(pEFlags);
2881 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2882 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2883
2884 IEM_MC_ADVANCE_RIP_AND_FINISH();
2885 IEM_MC_END();
2886 }
2887 else
2888 {
2889 /* memory operand */
2890 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
2891 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2892 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
2893
2894 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2896
2897 IEM_MC_LOCAL(uint16_t, u16Tmp);
2898 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2899
2900 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
2901 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
2902 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2903 IEM_MC_REF_EFLAGS(pEFlags);
2904 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2905 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2906
2907 IEM_MC_ADVANCE_RIP_AND_FINISH();
2908 IEM_MC_END();
2909 }
2910 break;
2911 }
2912
2913 case IEMMODE_32BIT:
2914 {
2915 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
2916 if (IEM_IS_MODRM_REG_MODE(bRm))
2917 {
2918 /* register operand */
2919 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2920 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
2921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2922 IEM_MC_LOCAL(uint32_t, u32Tmp);
2923 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2924
2925 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
2926 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
2927 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2928 IEM_MC_REF_EFLAGS(pEFlags);
2929 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
2930 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2931
2932 IEM_MC_ADVANCE_RIP_AND_FINISH();
2933 IEM_MC_END();
2934 }
2935 else
2936 {
2937 /* memory operand */
2938 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
2939 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2940 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2941
2942 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2944
2945 IEM_MC_LOCAL(uint32_t, u32Tmp);
2946 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2947
2948 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
2949 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
2950 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2951 IEM_MC_REF_EFLAGS(pEFlags);
2952 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
2953 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2954
2955 IEM_MC_ADVANCE_RIP_AND_FINISH();
2956 IEM_MC_END();
2957 }
2958 break;
2959 }
2960
2961 case IEMMODE_64BIT:
2962 {
2963 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
2964 if (IEM_IS_MODRM_REG_MODE(bRm))
2965 {
2966 /* register operand */
2967 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2968 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
2969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2970 IEM_MC_LOCAL(uint64_t, u64Tmp);
2971 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2972
2973 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
2974 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
2975 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2976 IEM_MC_REF_EFLAGS(pEFlags);
2977 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
2978 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
2979
2980 IEM_MC_ADVANCE_RIP_AND_FINISH();
2981 IEM_MC_END();
2982 }
2983 else
2984 {
2985 /* memory operand */
2986 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
2987 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2988 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2989
2990 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
2991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* parameter count for the threaded function for this block. */
2992
2993 IEM_MC_LOCAL(uint64_t, u64Tmp);
2994 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2995
2996 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
2997 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int32_t)u32Imm, 1);
2998 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2999 IEM_MC_REF_EFLAGS(pEFlags);
3000 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3001 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3002
3003 IEM_MC_ADVANCE_RIP_AND_FINISH();
3004 IEM_MC_END();
3005 }
3006 break;
3007 }
3008
3009 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3010 }
3011}
3012
3013
/**
 * @opcode 0x6a
 *
 * PUSH Ib - push a sign-extended byte immediate.  The immediate is widened
 * to the effective operand size before being pushed.
 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to a 64-bit push. */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_186, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint16_t, uValue, (int16_t)i8Imm); /* sign-extend i8 -> 16 bits */
            IEM_MC_PUSH_U16(uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint32_t, uValue, (int32_t)i8Imm); /* sign-extend i8 -> 32 bits */
            IEM_MC_PUSH_U32(uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint64_t, uValue, (int64_t)i8Imm); /* sign-extend i8 -> 64 bits */
            IEM_MC_PUSH_U64(uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3053
3054
/**
 * @opcode 0x6b
 * @opflclass multiply
 *
 * IMUL Gv,Ev,Ib - three operand signed multiply with a sign-extended byte
 * immediate.  Both the register and memory source forms fetch the source
 * into a temporary, call the two-operand imul worker on it, and write the
 * result to the ModRM reg operand.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            /* Worker selected by guest CPU EFLAGS behaviour (Intel vs AMD differ
               on the undefined flags). */
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);

                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = imm8 byte following the ModRM bytes */

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = imm8 byte following the ModRM bytes */

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = imm8 byte following the ModRM bytes */

                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the threaded parameter count. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3217
3218
/**
 * @opcode 0x6c
 * @opfltest iopl,df
 *
 * INS Yb,DX - byte string input from the I/O port in DX.  The whole
 * operation is deferred to a C implementation; this decoder only picks the
 * worker by REP prefix and effective address size.
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REPZ and REPNZ are both treated as a plain REP prefix here. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        /* The REP forms update both xDI and xCX, hence the two clobber bits. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        /* Non-REP forms only advance xDI. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3271
3272
/**
 * @opcode 0x6d
 * @opfltest iopl,df
 *
 * INS Yv,DX - word/dword string input from the I/O port in DX.  Deferred to
 * a C implementation selected by REP prefix, effective operand size and
 * effective address size.
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REPZ and REPNZ are both treated as a plain REP prefix here. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        /* Double dispatch: operand size first, then address size.  The REP
           forms update both xDI and xCX, hence the two clobber bits. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size is handled by the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        /* Non-REP forms only advance xDI. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size is handled by the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3378
3379
/**
 * @opcode 0x6e
 * @opfltest iopl,df
 *
 * OUTS DX,Yb - byte string output to the I/O port in DX.  Deferred to a C
 * implementation; unlike INS the workers also take the effective segment,
 * since the source operand is segment-relative.
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REPZ and REPNZ are both treated as a plain REP prefix here. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        /* The REP forms update both xSI and xCX, hence the two clobber bits. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        /* Non-REP forms only advance xSI. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3432
3433
/**
 * @opcode 0x6f
 * @opfltest iopl,df
 *
 * OUTS DX,Yv - word/dword string output to the I/O port in DX.  Deferred to
 * a C implementation selected by REP prefix, effective operand size and
 * effective address size; the workers also take the effective segment.
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REPZ and REPNZ are both treated as a plain REP prefix here. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        /* Double dispatch: operand size first, then address size.  The REP
           forms update both xSI and xCX, hence the two clobber bits. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size is handled by the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        /* Non-REP forms only advance xSI. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size is handled by the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3539
3540
/**
 * @opcode 0x70
 * @opfltest of
 *
 * JO rel8 - jump short if the overflow flag (OF) is set.
 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3560
3561
/**
 * @opcode 0x71
 * @opfltest of
 *
 * JNO rel8 - jump short if the overflow flag (OF) is clear.
 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) { /* inverted: OF set means fall through */
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3581
/**
 * @opcode 0x72
 * @opfltest cf
 *
 * JC/JB/JNAE rel8 - jump short if the carry flag (CF) is set.
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3601
3602
/**
 * @opcode 0x73
 * @opfltest cf
 *
 * JNC/JNB/JAE rel8 - jump short if the carry flag (CF) is clear.
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) { /* inverted: CF set means fall through */
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3622
3623
/**
 * @opcode 0x74
 * @opfltest zf
 *
 * JE/JZ rel8 - jump short if the zero flag (ZF) is set.
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3643
3644
/**
 * @opcode 0x75
 * @opfltest zf
 *
 * JNE/JNZ rel8 - jump short if the zero flag (ZF) is clear.
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) { /* inverted: ZF set means fall through */
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3664
3665
/**
 * @opcode 0x76
 * @opfltest cf,zf
 *
 * JBE/JNA rel8 - jump short if CF or ZF is set (unsigned below-or-equal).
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3685
3686
/**
 * @opcode 0x77
 * @opfltest cf,zf
 *
 * JA/JNBE rel8 - jump short if both CF and ZF are clear (unsigned above).
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) { /* inverted: CF or ZF set means fall through */
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3706
3707
/**
 * @opcode 0x78
 * @opfltest sf
 *
 * JS rel8 - jump short if the sign flag (SF) is set.
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3727
3728
/**
 * @opcode 0x79
 * @opfltest sf
 *
 * JNS rel8 - jump short if the sign flag (SF) is clear.
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) { /* inverted: SF set means fall through */
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3748
3749
/**
 * @opcode 0x7a
 * @opfltest pf
 *
 * JP/JPE rel8 - jump short if the parity flag (PF) is set.
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3769
3770
/**
 * @opcode 0x7b
 * @opfltest pf
 *
 * JNP/JPO rel8 - jump short if the parity flag (PF) is clear.
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) { /* inverted: PF set means fall through */
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3790
3791
/**
 * @opcode 0x7c
 * @opfltest sf,of
 *
 * JL/JNGE rel8 - jump short if SF != OF (signed less).
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3811
3812
/**
 * @opcode 0x7d
 * @opfltest sf,of
 *
 * JNL/JGE rel8 - jump short if SF == OF (signed greater-or-equal).
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) { /* inverted: SF != OF means fall through */
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3832
3833
/**
 * @opcode 0x7e
 * @opfltest zf,sf,of
 *
 * JLE/JNG rel8 - jump short if ZF is set or SF != OF (signed less-or-equal).
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3853
3854
/**
 * @opcode 0x7f
 * @opfltest zf,sf,of
 *
 * JG/JNLE rel8 - jump short if ZF is clear and SF == OF (signed greater).
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) { /* inverted condition */
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3874
3875
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Eb_Ib_80.
 *
 * Handles the register target and the plain (unlocked) memory target.  Note
 * that the expansion deliberately ends inside an open 'else' block: the
 * IEMOP_BODY_BINARY_Eb_Ib_LOCKED (or _NO_LOCK) macro must follow immediately
 * to supply the LOCK-prefixed memory path and close the braces.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = imm8 after ModRM */ \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0

/**
 * Continuation of IEMOP_BODY_BINARY_Eb_Ib_RW: the LOCK-prefixed memory
 * target, using an atomic mapping and the locked worker.  Closes the braces
 * left open by the _RW macro.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = imm8 after ModRM */ \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
3950
/**
 * Body for a group 1 byte-sized binary operator, Eb,Ib, that only reads the
 * destination (CMP): the memory operand is mapped read-only and only EFLAGS
 * are committed.
 *
 * Note! Deliberately unbalanced braces: the LOCK-prefix else-branch is left
 *       open and must be closed by invoking IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK
 *       right after (separate macro due to IEMAllInstPython.py parsing
 *       restrictions).
 *
 * @param   a_fnNormalU8    Assembly helper computing the 8-bit result flags.
 */
3951#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_fnNormalU8) \
3952 if (IEM_IS_MODRM_REG_MODE(bRm)) \
3953 { \
3954 /* register target */ \
3955 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3956 IEM_MC_BEGIN(3, 0, 0, 0); \
3957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
3958 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3959 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3960 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
3961 \
3962 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3963 IEM_MC_REF_EFLAGS(pEFlags); \
3964 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3965 \
3966 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3967 IEM_MC_END(); \
3968 } \
3969 else \
3970 { \
3971 /* memory target */ \
3972 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
3973 { \
3974 IEM_MC_BEGIN(3, 3, 0, 0); \
3975 IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
3976 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
3977 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
3978 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
3979 \
3980 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
3981 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3982 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3983 IEMOP_HLP_DONE_DECODING(); \
3984 \
3985 IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
3986 IEM_MC_FETCH_EFLAGS(EFlags); \
3987 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3988 \
3989 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
3990 IEM_MC_COMMIT_EFLAGS(EFlags); \
3991 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3992 IEM_MC_END(); \
3993 } \
3994 else \
3995 { \
3996 (void)0
3997
/**
 * Tail macro closing IEMOP_BODY_BINARY_Eb_Ib_RO for operators that do not
 * allow the LOCK prefix (CMP): finishes decoding and raises \#UD for the
 * invalid lock prefix.  Closes the braces left open by the _RO macro.
 */
3998#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
3999 IEMOP_HLP_DONE_DECODING(); \
4000 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
4001 } \
4002 } \
4003 (void)0
4004
4005
4006
4007/**
4008 * @opmaps grp1_80,grp1_83
4009 * @opcode /0
4010 * @opflclass arithmetic
 *
 * ADD Eb,Ib: adds a byte immediate to a byte register or memory operand;
 * LOCK-prefixed memory forms go to the atomic helper via the _LOCKED tail.
4011 */
4012FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
4013{
4014 IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
4015 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_add_u8);
4016 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
4017}
4018
4019
4020/**
4021 * @opmaps grp1_80,grp1_83
4022 * @opcode /1
4023 * @opflclass logical
 *
 * OR Eb,Ib: bitwise-ORs a byte immediate into a byte register or memory
 * operand; LOCK-prefixed memory forms use the atomic helper.
4024 */
4025FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
4026{
4027 IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
4028 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_or_u8);
4029 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
4030}
4031
4032
4033/**
4034 * @opmaps grp1_80,grp1_83
4035 * @opcode /2
4036 * @opflclass arithmetic_carry
 *
 * ADC Eb,Ib: add-with-carry of a byte immediate (consumes CF as input);
 * LOCK-prefixed memory forms use the atomic helper.
4037 */
4038FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
4039{
4040 IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
4041 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_adc_u8);
4042 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
4043}
4044
4045
4046/**
4047 * @opmaps grp1_80,grp1_83
4048 * @opcode /3
4049 * @opflclass arithmetic_carry
 *
 * SBB Eb,Ib: subtract-with-borrow of a byte immediate (consumes CF as
 * input); LOCK-prefixed memory forms use the atomic helper.
4050 */
4051FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
4052{
4053 IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
4054 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sbb_u8);
4055 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
4056}
4057
4058
4059/**
4060 * @opmaps grp1_80,grp1_83
4061 * @opcode /4
4062 * @opflclass logical
 *
 * AND Eb,Ib: bitwise-ANDs a byte immediate into a byte register or memory
 * operand; LOCK-prefixed memory forms use the atomic helper.
4063 */
4064FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
4065{
4066 IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
4067 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_and_u8);
4068 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
4069}
4070
4071
4072/**
4073 * @opmaps grp1_80,grp1_83
4074 * @opcode /5
4075 * @opflclass arithmetic
 *
 * SUB Eb,Ib: subtracts a byte immediate from a byte register or memory
 * operand; LOCK-prefixed memory forms use the atomic helper.
4076 */
4077FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
4078{
4079 IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
4080 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sub_u8);
4081 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
4082}
4083
4084
4085/**
4086 * @opmaps grp1_80,grp1_83
4087 * @opcode /6
4088 * @opflclass logical
 *
 * XOR Eb,Ib: bitwise-XORs a byte immediate into a byte register or memory
 * operand; LOCK-prefixed memory forms use the atomic helper.
4089 */
4090FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
4091{
4092 IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
4093 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_xor_u8);
4094 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
4095}
4096
4097
4098/**
4099 * @opmaps grp1_80,grp1_83
4100 * @opcode /7
4101 * @opflclass arithmetic
 *
 * CMP Eb,Ib: compares a byte operand against a byte immediate, updating
 * only EFLAGS (destination is never written, hence the _RO body), and the
 * LOCK prefix is rejected via the _NO_LOCK tail.
4102 */
4103FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
4104{
4105 IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
4106 IEMOP_BODY_BINARY_Eb_Ib_RO(iemAImpl_cmp_u8);
4107 IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
4108}
4109
4110
4111/**
4112 * @opcode 0x80
 *
 * Group 1 dispatcher for opcode 0x80 (Eb,Ib): the reg field of the ModR/M
 * byte selects the operation; the worker decodes the immediate itself.
4113 */
4114FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
4115{
4116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4117 switch (IEM_GET_MODRM_REG_8(bRm))
4118 {
4119 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
4120 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib, bRm);
4121 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
4122 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
4123 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
4124 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
4125 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
4126 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
4127 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4128 }
4129}
4130
4131
4132/**
4133 * Body for a group 1 binary operator.
 *
 * Read-write Ev,Iz form: decodes a 16/32-bit immediate (32-bit sign-extended
 * to 64-bit for 64-bit operand size) and applies the size-matching helper to
 * a register or mapped-memory destination, committing EFLAGS afterwards.
 * The 32-bit register case also zeroes the high half of the 64-bit GREG.
 *
 * Note! Deliberately unbalanced braces: the memory path's LOCK-prefix
 *       else-branch is left open and must be closed by invoking
 *       IEMOP_BODY_BINARY_Ev_Iz_LOCKED directly afterwards.
4134 */
4135#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4136 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4137 { \
4138 /* register target */ \
4139 switch (pVCpu->iem.s.enmEffOpSize) \
4140 { \
4141 case IEMMODE_16BIT: \
4142 { \
4143 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4144 IEM_MC_BEGIN(3, 0, 0, 0); \
4145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4146 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4147 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
4148 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4149 \
4150 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4151 IEM_MC_REF_EFLAGS(pEFlags); \
4152 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4153 \
4154 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4155 IEM_MC_END(); \
4156 break; \
4157 } \
4158 \
4159 case IEMMODE_32BIT: \
4160 { \
4161 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4162 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4164 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4165 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
4166 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4167 \
4168 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4169 IEM_MC_REF_EFLAGS(pEFlags); \
4170 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4171 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4172 \
4173 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4174 IEM_MC_END(); \
4175 break; \
4176 } \
4177 \
4178 case IEMMODE_64BIT: \
4179 { \
4180 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4181 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4183 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4184 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
4185 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4186 \
4187 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4188 IEM_MC_REF_EFLAGS(pEFlags); \
4189 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4190 \
4191 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4192 IEM_MC_END(); \
4193 break; \
4194 } \
4195 \
4196 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4197 } \
4198 } \
4199 else \
4200 { \
4201 /* memory target */ \
4202 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4203 { \
4204 switch (pVCpu->iem.s.enmEffOpSize) \
4205 { \
4206 case IEMMODE_16BIT: \
4207 { \
4208 IEM_MC_BEGIN(3, 3, 0, 0); \
4209 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4210 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4211 \
4212 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4213 IEMOP_HLP_DONE_DECODING(); \
4214 \
4215 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4216 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4217 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4218 \
4219 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
4220 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4221 IEM_MC_FETCH_EFLAGS(EFlags); \
4222 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4223 \
4224 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4225 IEM_MC_COMMIT_EFLAGS(EFlags); \
4226 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4227 IEM_MC_END(); \
4228 break; \
4229 } \
4230 \
4231 case IEMMODE_32BIT: \
4232 { \
4233 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4234 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4236 \
4237 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4238 IEMOP_HLP_DONE_DECODING(); \
4239 \
4240 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4241 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4242 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4243 \
4244 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
4245 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4246 IEM_MC_FETCH_EFLAGS(EFlags); \
4247 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4248 \
4249 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4250 IEM_MC_COMMIT_EFLAGS(EFlags); \
4251 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4252 IEM_MC_END(); \
4253 break; \
4254 } \
4255 \
4256 case IEMMODE_64BIT: \
4257 { \
4258 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4259 \
4260 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4261 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4262 \
4263 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4264 IEMOP_HLP_DONE_DECODING(); \
4265 \
4266 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4267 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4268 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4269 \
4270 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
4271 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4272 IEM_MC_FETCH_EFLAGS(EFlags); \
4273 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4274 \
4275 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4276 IEM_MC_COMMIT_EFLAGS(EFlags); \
4277 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4278 IEM_MC_END(); \
4279 break; \
4280 } \
4281 \
4282 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4283 } \
4284 } \
4285 else \
4286 { \
4287 (void)0
4288/* This must be a separate macro due to parsing restrictions in IEMAllInstPython.py. */
/* It supplies the LOCK-prefixed memory path for the Ev,Iz operators: atomic
   mapping plus the a_fnLockedUxx helpers, and it closes the braces left open
   by IEMOP_BODY_BINARY_Ev_Iz_RW, so it must be invoked right after that. */
4289#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
4290 switch (pVCpu->iem.s.enmEffOpSize) \
4291 { \
4292 case IEMMODE_16BIT: \
4293 { \
4294 IEM_MC_BEGIN(3, 3, 0, 0); \
4295 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4296 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4297 \
4298 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4299 IEMOP_HLP_DONE_DECODING(); \
4300 \
4301 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4302 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4303 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4304 \
4305 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
4306 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4307 IEM_MC_FETCH_EFLAGS(EFlags); \
4308 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
4309 \
4310 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4311 IEM_MC_COMMIT_EFLAGS(EFlags); \
4312 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4313 IEM_MC_END(); \
4314 break; \
4315 } \
4316 \
4317 case IEMMODE_32BIT: \
4318 { \
4319 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4320 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4321 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4322 \
4323 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4324 IEMOP_HLP_DONE_DECODING(); \
4325 \
4326 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4327 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4328 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4329 \
4330 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
4331 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4332 IEM_MC_FETCH_EFLAGS(EFlags); \
4333 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
4334 \
4335 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4336 IEM_MC_COMMIT_EFLAGS(EFlags); \
4337 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4338 IEM_MC_END(); \
4339 break; \
4340 } \
4341 \
4342 case IEMMODE_64BIT: \
4343 { \
4344 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4345 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4347 \
4348 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4349 IEMOP_HLP_DONE_DECODING(); \
4350 \
4351 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4352 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4353 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4354 \
4355 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
4356 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4357 IEM_MC_FETCH_EFLAGS(EFlags); \
4358 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
4359 \
4360 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4361 IEM_MC_COMMIT_EFLAGS(EFlags); \
4362 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4363 IEM_MC_END(); \
4364 break; \
4365 } \
4366 \
4367 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4368 } \
4369 } \
4370 } \
4371 (void)0
4372
4373/* read-only version */
/* Self-contained Ev,Iz body for operators that only read the destination
   (CMP): memory is mapped read-only, only EFLAGS are committed, the 32-bit
   register case needs no high-GREG clearing since nothing is written, and
   a LOCK prefix is rejected with \#UD in the final else-branch. */
4374#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4375 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4376 { \
4377 /* register target */ \
4378 switch (pVCpu->iem.s.enmEffOpSize) \
4379 { \
4380 case IEMMODE_16BIT: \
4381 { \
4382 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4383 IEM_MC_BEGIN(3, 0, 0, 0); \
4384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4385 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4386 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
4387 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4388 \
4389 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4390 IEM_MC_REF_EFLAGS(pEFlags); \
4391 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4392 \
4393 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4394 IEM_MC_END(); \
4395 break; \
4396 } \
4397 \
4398 case IEMMODE_32BIT: \
4399 { \
4400 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4401 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4402 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4403 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4404 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
4405 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4406 \
4407 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4408 IEM_MC_REF_EFLAGS(pEFlags); \
4409 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4410 \
4411 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4412 IEM_MC_END(); \
4413 break; \
4414 } \
4415 \
4416 case IEMMODE_64BIT: \
4417 { \
4418 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4419 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4421 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4422 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
4423 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4424 \
4425 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4426 IEM_MC_REF_EFLAGS(pEFlags); \
4427 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4428 \
4429 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4430 IEM_MC_END(); \
4431 break; \
4432 } \
4433 \
4434 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4435 } \
4436 } \
4437 else \
4438 { \
4439 /* memory target */ \
4440 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4441 { \
4442 switch (pVCpu->iem.s.enmEffOpSize) \
4443 { \
4444 case IEMMODE_16BIT: \
4445 { \
4446 IEM_MC_BEGIN(3, 3, 0, 0); \
4447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4448 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4449 \
4450 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4451 IEMOP_HLP_DONE_DECODING(); \
4452 \
4453 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4454 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
4455 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4456 \
4457 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
4458 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4459 IEM_MC_FETCH_EFLAGS(EFlags); \
4460 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4461 \
4462 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
4463 IEM_MC_COMMIT_EFLAGS(EFlags); \
4464 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4465 IEM_MC_END(); \
4466 break; \
4467 } \
4468 \
4469 case IEMMODE_32BIT: \
4470 { \
4471 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4472 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4473 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4474 \
4475 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4476 IEMOP_HLP_DONE_DECODING(); \
4477 \
4478 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4479 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
4480 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4481 \
4482 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
4483 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4484 IEM_MC_FETCH_EFLAGS(EFlags); \
4485 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4486 \
4487 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
4488 IEM_MC_COMMIT_EFLAGS(EFlags); \
4489 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4490 IEM_MC_END(); \
4491 break; \
4492 } \
4493 \
4494 case IEMMODE_64BIT: \
4495 { \
4496 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4499 \
4500 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4501 IEMOP_HLP_DONE_DECODING(); \
4502 \
4503 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4504 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
4505 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4506 \
4507 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
4508 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4509 IEM_MC_FETCH_EFLAGS(EFlags); \
4510 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4511 \
4512 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
4513 IEM_MC_COMMIT_EFLAGS(EFlags); \
4514 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4515 IEM_MC_END(); \
4516 break; \
4517 } \
4518 \
4519 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4520 } \
4521 } \
4522 else \
4523 { \
4524 IEMOP_HLP_DONE_DECODING(); \
4525 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
4526 } \
4527 } \
4528 (void)0
4529
4530
4531/**
4532 * @opmaps grp1_81
4533 * @opcode /0
4534 * @opflclass arithmetic
 *
 * ADD Ev,Iz: word/dword/qword immediate add to a register or memory
 * destination; LOCK-prefixed memory forms use the atomic helpers.
4535 */
4536FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
4537{
4538 IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
4539 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
4540 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
4541}
4542
4543
4544/**
4545 * @opmaps grp1_81
4546 * @opcode /1
4547 * @opflclass logical
 *
 * OR Ev,Iz: word/dword/qword immediate bitwise-OR into a register or memory
 * destination; LOCK-prefixed memory forms use the atomic helpers.
4548 */
4549FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
4550{
4551 IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
4552 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
4553 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
4554}
4555
4556
4557/**
4558 * @opmaps grp1_81
4559 * @opcode /2
4560 * @opflclass arithmetic_carry
 *
 * ADC Ev,Iz: immediate add-with-carry (consumes CF as input);
 * LOCK-prefixed memory forms use the atomic helpers.
4561 */
4562FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
4563{
4564 IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
4565 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
4566 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
4567}
4568
4569
4570/**
4571 * @opmaps grp1_81
4572 * @opcode /3
4573 * @opflclass arithmetic_carry
 *
 * SBB Ev,Iz: immediate subtract-with-borrow (consumes CF as input);
 * LOCK-prefixed memory forms use the atomic helpers.
4574 */
4575FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
4576{
4577 IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
4578 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
4579 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
4580}
4581
4582
4583/**
4584 * @opmaps grp1_81
4585 * @opcode /4
4586 * @opflclass logical
 *
 * AND Ev,Iz: word/dword/qword immediate bitwise-AND into a register or
 * memory destination; LOCK-prefixed memory forms use the atomic helpers.
4587 */
4588FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
4589{
4590 IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
4591 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
4592 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
4593}
4594
4595
4596/**
4597 * @opmaps grp1_81
4598 * @opcode /5
4599 * @opflclass arithmetic
 *
 * SUB Ev,Iz: word/dword/qword immediate subtract from a register or memory
 * destination; LOCK-prefixed memory forms use the atomic helpers.
4600 */
4601FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
4602{
4603 IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
4604 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
4605 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
4606}
4607
4608
4609/**
4610 * @opmaps grp1_81
4611 * @opcode /6
4612 * @opflclass logical
 *
 * XOR Ev,Iz: word/dword/qword immediate bitwise-XOR into a register or
 * memory destination; LOCK-prefixed memory forms use the atomic helpers.
4613 */
4614FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
4615{
4616 IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
4617 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
4618 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
4619}
4620
4621
4622/**
4623 * @opmaps grp1_81
4624 * @opcode /7
4625 * @opflclass arithmetic
 *
 * CMP Ev,Iz: compares the destination against the immediate, updating only
 * EFLAGS.  Uses the self-contained read-only body, which also rejects the
 * LOCK prefix, so no separate tail macro is invoked here.
4626 */
4627FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
4628{
4629 IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
4630 IEMOP_BODY_BINARY_Ev_Iz_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
4631}
4632
4633
4634/**
4635 * @opcode 0x81
 *
 * Group 1 dispatcher for opcode 0x81 (Ev,Iz): the reg field of the ModR/M
 * byte selects the operation; the worker decodes the immediate itself.
4636 */
4637FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
4638{
4639 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4640 switch (IEM_GET_MODRM_REG_8(bRm))
4641 {
4642 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
4643 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz, bRm);
4644 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
4645 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
4646 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
4647 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
4648 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
4649 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
4650 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4651 }
4652}
4653
4654
4655/**
4656 * @opcode 0x82
4657 * @opmnemonic grp1_82
4658 * @opgroup og_groups
 *
 * Opcode 0x82 is an alias of 0x80 (group 1 Eb,Ib) that is only valid
 * outside 64-bit mode; it raises \#UD in 64-bit mode and otherwise
 * forwards to the 0x80 dispatcher.
4659 */
4660FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
4661{
4662 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
4663 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
4664}
4665
4666
4667/**
4668 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4669 * iemOp_Grp1_Ev_Ib.
 *
 * Read-write Ev,Ib form (opcode 0x83): the byte immediate is sign-extended
 * to the effective operand size before the size-matching helper is applied
 * to a register or mapped-memory destination; EFLAGS are committed after.
 *
 * Note! Deliberately unbalanced braces: the memory path's LOCK-prefix
 *       else-branch is left open and must be closed by invoking
 *       IEMOP_BODY_BINARY_Ev_Ib_LOCKED directly afterwards.
4670 */
4671#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4672 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4673 { \
4674 /* \
4675 * Register target \
4676 */ \
4677 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4678 switch (pVCpu->iem.s.enmEffOpSize) \
4679 { \
4680 case IEMMODE_16BIT: \
4681 IEM_MC_BEGIN(3, 0, 0, 0); \
4682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4683 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4684 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
4685 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4686 \
4687 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4688 IEM_MC_REF_EFLAGS(pEFlags); \
4689 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4690 \
4691 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4692 IEM_MC_END(); \
4693 break; \
4694 \
4695 case IEMMODE_32BIT: \
4696 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4698 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4699 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
4700 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4701 \
4702 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4703 IEM_MC_REF_EFLAGS(pEFlags); \
4704 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4705 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4706 \
4707 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4708 IEM_MC_END(); \
4709 break; \
4710 \
4711 case IEMMODE_64BIT: \
4712 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4714 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4715 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
4716 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4717 \
4718 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4719 IEM_MC_REF_EFLAGS(pEFlags); \
4720 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4721 \
4722 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4723 IEM_MC_END(); \
4724 break; \
4725 \
4726 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4727 } \
4728 } \
4729 else \
4730 { \
4731 /* \
4732 * Memory target. \
4733 */ \
4734 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4735 { \
4736 switch (pVCpu->iem.s.enmEffOpSize) \
4737 { \
4738 case IEMMODE_16BIT: \
4739 IEM_MC_BEGIN(3, 3, 0, 0); \
4740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4742 \
4743 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4744 IEMOP_HLP_DONE_DECODING(); \
4745 \
4746 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4747 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4748 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4749 \
4750 IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
4751 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4752 IEM_MC_FETCH_EFLAGS(EFlags); \
4753 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4754 \
4755 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4756 IEM_MC_COMMIT_EFLAGS(EFlags); \
4757 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4758 IEM_MC_END(); \
4759 break; \
4760 \
4761 case IEMMODE_32BIT: \
4762 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4764 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4765 \
4766 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4767 IEMOP_HLP_DONE_DECODING(); \
4768 \
4769 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4770 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4771 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4772 \
4773 IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
4774 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4775 IEM_MC_FETCH_EFLAGS(EFlags); \
4776 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4777 \
4778 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4779 IEM_MC_COMMIT_EFLAGS(EFlags); \
4780 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4781 IEM_MC_END(); \
4782 break; \
4783 \
4784 case IEMMODE_64BIT: \
4785 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4786 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4788 \
4789 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4790 IEMOP_HLP_DONE_DECODING(); \
4791 \
4792 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4793 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4794 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4795 \
4796 IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
4797 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4798 IEM_MC_FETCH_EFLAGS(EFlags); \
4799 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4800 \
4801 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4802 IEM_MC_COMMIT_EFLAGS(EFlags); \
4803 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4804 IEM_MC_END(); \
4805 break; \
4806 \
4807 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4808 } \
4809 } \
4810 else \
4811 { \
4812 (void)0
4813/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/* LOCK-prefixed memory path for the Ev,Ib operators: sign-extends the byte
   immediate, maps the destination atomically and calls the a_fnLockedUxx
   helpers.  Closes the braces left open by IEMOP_BODY_BINARY_Ev_Ib_RW, so
   it must be invoked directly after that macro. */
4814#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
4815 switch (pVCpu->iem.s.enmEffOpSize) \
4816 { \
4817 case IEMMODE_16BIT: \
4818 IEM_MC_BEGIN(3, 3, 0, 0); \
4819 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4820 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4821 \
4822 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4823 IEMOP_HLP_DONE_DECODING(); \
4824 \
4825 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4826 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4827 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4828 \
4829 IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
4830 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4831 IEM_MC_FETCH_EFLAGS(EFlags); \
4832 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
4833 \
4834 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4835 IEM_MC_COMMIT_EFLAGS(EFlags); \
4836 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4837 IEM_MC_END(); \
4838 break; \
4839 \
4840 case IEMMODE_32BIT: \
4841 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4842 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4843 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4844 \
4845 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4846 IEMOP_HLP_DONE_DECODING(); \
4847 \
4848 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4849 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4850 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4851 \
4852 IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
4853 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4854 IEM_MC_FETCH_EFLAGS(EFlags); \
4855 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
4856 \
4857 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4858 IEM_MC_COMMIT_EFLAGS(EFlags); \
4859 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4860 IEM_MC_END(); \
4861 break; \
4862 \
4863 case IEMMODE_64BIT: \
4864 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4865 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4866 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4867 \
4868 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4869 IEMOP_HLP_DONE_DECODING(); \
4870 \
4871 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4872 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4873 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4874 \
4875 IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
4876 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4877 IEM_MC_FETCH_EFLAGS(EFlags); \
4878 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
4879 \
4880 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4881 IEM_MC_COMMIT_EFLAGS(EFlags); \
4882 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4883 IEM_MC_END(); \
4884 break; \
4885 \
4886 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4887 } \
4888 } \
4889 } \
4890 (void)0
4891
4892/* read-only variant */
/**
 * Body for group 1 (0x83) variants that only read the Ev destination operand
 * (i.e. CMP): decodes the sign-extended Ib immediate, switches on the
 * effective operand size and invokes the given flags-only worker.
 *
 * Expects the ModR/M byte to already be fetched into @c bRm.  The register
 * form fetches Ib immediately (no displacement can follow), while the memory
 * form fetches it after the effective-address calculation.  Memory targets
 * are mapped read-only; a LOCK prefix takes the invalid-lock-prefix path
 * (\#UD) since the instruction never writes its destination.
 *
 * @param   a_fnNormalU16   The 16-bit worker function.
 * @param   a_fnNormalU32   The 32-bit worker function.
 * @param   a_fnNormalU64   The 64-bit worker function.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); /* sign-extended Ib */ \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = one Ib byte follows */ \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* Read-only destination: LOCK prefix is invalid. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
5038
5039/**
5040 * @opmaps grp1_83
5041 * @opcode /0
5042 * @opflclass arithmetic
5043 */
5044FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
5045{
5046 IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
5047 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
5048 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
5049}
5050
5051
5052/**
5053 * @opmaps grp1_83
5054 * @opcode /1
5055 * @opflclass logical
5056 */
5057FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
5058{
5059 IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
5060 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
5061 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
5062}
5063
5064
5065/**
5066 * @opmaps grp1_83
5067 * @opcode /2
5068 * @opflclass arithmetic_carry
5069 */
5070FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
5071{
5072 IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
5073 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
5074 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
5075}
5076
5077
5078/**
5079 * @opmaps grp1_83
5080 * @opcode /3
5081 * @opflclass arithmetic_carry
5082 */
5083FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
5084{
5085 IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
5086 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
5087 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
5088}
5089
5090
5091/**
5092 * @opmaps grp1_83
5093 * @opcode /4
5094 * @opflclass logical
5095 */
5096FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
5097{
5098 IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
5099 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
5100 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
5101}
5102
5103
5104/**
5105 * @opmaps grp1_83
5106 * @opcode /5
5107 * @opflclass arithmetic
5108 */
5109FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
5110{
5111 IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
5112 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
5113 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
5114}
5115
5116
5117/**
5118 * @opmaps grp1_83
5119 * @opcode /6
5120 * @opflclass logical
5121 */
5122FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
5123{
5124 IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
5125 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
5126 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
5127}
5128
5129
5130/**
5131 * @opmaps grp1_83
5132 * @opcode /7
5133 * @opflclass arithmetic
5134 */
5135FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
5136{
5137 IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
5138 IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
5139}
5140
5141
5142/**
5143 * @opcode 0x83
5144 */
5145FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
5146{
5147 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
5148 to the 386 even if absent in the intel reference manuals and some
5149 3rd party opcode listings. */
5150 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5151 switch (IEM_GET_MODRM_REG_8(bRm))
5152 {
5153 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
5154 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib, bRm);
5155 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
5156 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
5157 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
5158 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
5159 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
5160 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
5161 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5162 }
5163}
5164
5165
5166/**
5167 * @opcode 0x84
5168 * @opflclass logical
5169 */
5170FNIEMOP_DEF(iemOp_test_Eb_Gb)
5171{
5172 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
5173 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5174 IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_test_u8);
5175}
5176
5177
5178/**
5179 * @opcode 0x85
5180 * @opflclass logical
5181 */
5182FNIEMOP_DEF(iemOp_test_Ev_Gv)
5183{
5184 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
5185 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5186 IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64);
5187}
5188
5189
5190/**
5191 * @opcode 0x86
5192 */
5193FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
5194{
5195 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5196 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
5197
5198 /*
5199 * If rm is denoting a register, no more instruction bytes.
5200 */
5201 if (IEM_IS_MODRM_REG_MODE(bRm))
5202 {
5203 IEM_MC_BEGIN(0, 2, 0, 0);
5204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5205 IEM_MC_LOCAL(uint8_t, uTmp1);
5206 IEM_MC_LOCAL(uint8_t, uTmp2);
5207
5208 IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5209 IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5210 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5211 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5212
5213 IEM_MC_ADVANCE_RIP_AND_FINISH();
5214 IEM_MC_END();
5215 }
5216 else
5217 {
5218 /*
5219 * We're accessing memory.
5220 */
5221#define IEMOP_XCHG_BYTE(a_fnWorker, a_Style) \
5222 IEM_MC_BEGIN(2, 4, 0, 0); \
5223 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5224 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5225 IEM_MC_LOCAL(uint8_t, uTmpReg); \
5226 IEM_MC_ARG(uint8_t *, pu8Mem, 0); \
5227 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1); \
5228 \
5229 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5230 IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
5231 IEM_MC_MEM_MAP_U8_##a_Style(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5232 IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5233 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker, pu8Mem, pu8Reg); \
5234 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Style(bUnmapInfo); \
5235 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5236 \
5237 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5238 IEM_MC_END()
5239
5240 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5241 {
5242 IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_locked,ATOMIC);
5243 }
5244 else
5245 {
5246 IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_unlocked,RW);
5247 }
5248 }
5249}
5250
5251
5252/**
5253 * @opcode 0x87
5254 */
5255FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
5256{
5257 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
5258 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5259
5260 /*
5261 * If rm is denoting a register, no more instruction bytes.
5262 */
5263 if (IEM_IS_MODRM_REG_MODE(bRm))
5264 {
5265 switch (pVCpu->iem.s.enmEffOpSize)
5266 {
5267 case IEMMODE_16BIT:
5268 IEM_MC_BEGIN(0, 2, 0, 0);
5269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5270 IEM_MC_LOCAL(uint16_t, uTmp1);
5271 IEM_MC_LOCAL(uint16_t, uTmp2);
5272
5273 IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5274 IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5275 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5276 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5277
5278 IEM_MC_ADVANCE_RIP_AND_FINISH();
5279 IEM_MC_END();
5280 break;
5281
5282 case IEMMODE_32BIT:
5283 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5285 IEM_MC_LOCAL(uint32_t, uTmp1);
5286 IEM_MC_LOCAL(uint32_t, uTmp2);
5287
5288 IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5289 IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5290 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5291 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5292
5293 IEM_MC_ADVANCE_RIP_AND_FINISH();
5294 IEM_MC_END();
5295 break;
5296
5297 case IEMMODE_64BIT:
5298 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
5299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5300 IEM_MC_LOCAL(uint64_t, uTmp1);
5301 IEM_MC_LOCAL(uint64_t, uTmp2);
5302
5303 IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5304 IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5305 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5306 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5307
5308 IEM_MC_ADVANCE_RIP_AND_FINISH();
5309 IEM_MC_END();
5310 break;
5311
5312 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5313 }
5314 }
5315 else
5316 {
5317 /*
5318 * We're accessing memory.
5319 */
5320#define IEMOP_XCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
5321 do { \
5322 switch (pVCpu->iem.s.enmEffOpSize) \
5323 { \
5324 case IEMMODE_16BIT: \
5325 IEM_MC_BEGIN(2, 4, 0, 0); \
5326 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5327 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5328 IEM_MC_LOCAL(uint16_t, uTmpReg); \
5329 IEM_MC_ARG(uint16_t *, pu16Mem, 0); \
5330 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1); \
5331 \
5332 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5333 IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
5334 IEM_MC_MEM_MAP_U16_##a_Type(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5335 IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5336 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker16, pu16Mem, pu16Reg); \
5337 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
5338 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5339 \
5340 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5341 IEM_MC_END(); \
5342 break; \
5343 \
5344 case IEMMODE_32BIT: \
5345 IEM_MC_BEGIN(2, 4, IEM_MC_F_MIN_386, 0); \
5346 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5347 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5348 IEM_MC_LOCAL(uint32_t, uTmpReg); \
5349 IEM_MC_ARG(uint32_t *, pu32Mem, 0); \
5350 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1); \
5351 \
5352 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5353 IEMOP_HLP_DONE_DECODING(); \
5354 IEM_MC_MEM_MAP_U32_##a_Type(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5355 IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5356 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker32, pu32Mem, pu32Reg); \
5357 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
5358 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5359 \
5360 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5361 IEM_MC_END(); \
5362 break; \
5363 \
5364 case IEMMODE_64BIT: \
5365 IEM_MC_BEGIN(2, 4, IEM_MC_F_64BIT, 0); \
5366 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5367 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5368 IEM_MC_LOCAL(uint64_t, uTmpReg); \
5369 IEM_MC_ARG(uint64_t *, pu64Mem, 0); \
5370 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1); \
5371 \
5372 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5373 IEMOP_HLP_DONE_DECODING(); \
5374 IEM_MC_MEM_MAP_U64_##a_Type(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5375 IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5376 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker64, pu64Mem, pu64Reg); \
5377 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
5378 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5379 \
5380 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5381 IEM_MC_END(); \
5382 break; \
5383 \
5384 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5385 } \
5386 } while (0)
5387 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5388 {
5389 IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_locked, iemAImpl_xchg_u32_locked, iemAImpl_xchg_u64_locked,ATOMIC);
5390 }
5391 else
5392 {
5393 IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_unlocked, iemAImpl_xchg_u32_unlocked, iemAImpl_xchg_u64_unlocked,RW);
5394 }
5395 }
5396}
5397
5398
5399/**
5400 * @opcode 0x88
5401 */
5402FNIEMOP_DEF(iemOp_mov_Eb_Gb)
5403{
5404 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
5405
5406 uint8_t bRm;
5407 IEM_OPCODE_GET_NEXT_U8(&bRm);
5408
5409 /*
5410 * If rm is denoting a register, no more instruction bytes.
5411 */
5412 if (IEM_IS_MODRM_REG_MODE(bRm))
5413 {
5414 IEM_MC_BEGIN(0, 1, 0, 0);
5415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5416 IEM_MC_LOCAL(uint8_t, u8Value);
5417 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5418 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
5419 IEM_MC_ADVANCE_RIP_AND_FINISH();
5420 IEM_MC_END();
5421 }
5422 else
5423 {
5424 /*
5425 * We're writing a register to memory.
5426 */
5427 IEM_MC_BEGIN(0, 2, 0, 0);
5428 IEM_MC_LOCAL(uint8_t, u8Value);
5429 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5430 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5432 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5433 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
5434 IEM_MC_ADVANCE_RIP_AND_FINISH();
5435 IEM_MC_END();
5436 }
5437}
5438
5439
5440/**
5441 * @opcode 0x89
5442 */
5443FNIEMOP_DEF(iemOp_mov_Ev_Gv)
5444{
5445 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
5446
5447 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5448
5449 /*
5450 * If rm is denoting a register, no more instruction bytes.
5451 */
5452 if (IEM_IS_MODRM_REG_MODE(bRm))
5453 {
5454 switch (pVCpu->iem.s.enmEffOpSize)
5455 {
5456 case IEMMODE_16BIT:
5457 IEM_MC_BEGIN(0, 1, 0, 0);
5458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5459 IEM_MC_LOCAL(uint16_t, u16Value);
5460 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5461 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
5462 IEM_MC_ADVANCE_RIP_AND_FINISH();
5463 IEM_MC_END();
5464 break;
5465
5466 case IEMMODE_32BIT:
5467 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
5468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5469 IEM_MC_LOCAL(uint32_t, u32Value);
5470 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5471 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
5472 IEM_MC_ADVANCE_RIP_AND_FINISH();
5473 IEM_MC_END();
5474 break;
5475
5476 case IEMMODE_64BIT:
5477 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5479 IEM_MC_LOCAL(uint64_t, u64Value);
5480 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5481 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
5482 IEM_MC_ADVANCE_RIP_AND_FINISH();
5483 IEM_MC_END();
5484 break;
5485
5486 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5487 }
5488 }
5489 else
5490 {
5491 /*
5492 * We're writing a register to memory.
5493 */
5494 switch (pVCpu->iem.s.enmEffOpSize)
5495 {
5496 case IEMMODE_16BIT:
5497 IEM_MC_BEGIN(0, 2, 0, 0);
5498 IEM_MC_LOCAL(uint16_t, u16Value);
5499 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5500 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5502 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5503 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
5504 IEM_MC_ADVANCE_RIP_AND_FINISH();
5505 IEM_MC_END();
5506 break;
5507
5508 case IEMMODE_32BIT:
5509 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5510 IEM_MC_LOCAL(uint32_t, u32Value);
5511 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5512 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5514 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5515 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
5516 IEM_MC_ADVANCE_RIP_AND_FINISH();
5517 IEM_MC_END();
5518 break;
5519
5520 case IEMMODE_64BIT:
5521 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
5522 IEM_MC_LOCAL(uint64_t, u64Value);
5523 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5524 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5526 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5527 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
5528 IEM_MC_ADVANCE_RIP_AND_FINISH();
5529 IEM_MC_END();
5530 break;
5531
5532 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5533 }
5534 }
5535}
5536
5537
5538/**
5539 * @opcode 0x8a
5540 */
5541FNIEMOP_DEF(iemOp_mov_Gb_Eb)
5542{
5543 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
5544
5545 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5546
5547 /*
5548 * If rm is denoting a register, no more instruction bytes.
5549 */
5550 if (IEM_IS_MODRM_REG_MODE(bRm))
5551 {
5552 IEM_MC_BEGIN(0, 1, 0, 0);
5553 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5554 IEM_MC_LOCAL(uint8_t, u8Value);
5555 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5556 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
5557 IEM_MC_ADVANCE_RIP_AND_FINISH();
5558 IEM_MC_END();
5559 }
5560 else
5561 {
5562 /*
5563 * We're loading a register from memory.
5564 */
5565 IEM_MC_BEGIN(0, 2, 0, 0);
5566 IEM_MC_LOCAL(uint8_t, u8Value);
5567 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5568 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5570 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5571 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
5572 IEM_MC_ADVANCE_RIP_AND_FINISH();
5573 IEM_MC_END();
5574 }
5575}
5576
5577
5578/**
5579 * @opcode 0x8b
5580 */
5581FNIEMOP_DEF(iemOp_mov_Gv_Ev)
5582{
5583 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
5584
5585 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5586
5587 /*
5588 * If rm is denoting a register, no more instruction bytes.
5589 */
5590 if (IEM_IS_MODRM_REG_MODE(bRm))
5591 {
5592 switch (pVCpu->iem.s.enmEffOpSize)
5593 {
5594 case IEMMODE_16BIT:
5595 IEM_MC_BEGIN(0, 1, 0, 0);
5596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5597 IEM_MC_LOCAL(uint16_t, u16Value);
5598 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5599 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
5600 IEM_MC_ADVANCE_RIP_AND_FINISH();
5601 IEM_MC_END();
5602 break;
5603
5604 case IEMMODE_32BIT:
5605 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
5606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5607 IEM_MC_LOCAL(uint32_t, u32Value);
5608 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5609 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
5610 IEM_MC_ADVANCE_RIP_AND_FINISH();
5611 IEM_MC_END();
5612 break;
5613
5614 case IEMMODE_64BIT:
5615 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5617 IEM_MC_LOCAL(uint64_t, u64Value);
5618 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5619 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
5620 IEM_MC_ADVANCE_RIP_AND_FINISH();
5621 IEM_MC_END();
5622 break;
5623
5624 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5625 }
5626 }
5627 else
5628 {
5629 /*
5630 * We're loading a register from memory.
5631 */
5632 switch (pVCpu->iem.s.enmEffOpSize)
5633 {
5634 case IEMMODE_16BIT:
5635 IEM_MC_BEGIN(0, 2, 0, 0);
5636 IEM_MC_LOCAL(uint16_t, u16Value);
5637 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5638 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5640 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5641 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
5642 IEM_MC_ADVANCE_RIP_AND_FINISH();
5643 IEM_MC_END();
5644 break;
5645
5646 case IEMMODE_32BIT:
5647 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5648 IEM_MC_LOCAL(uint32_t, u32Value);
5649 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5650 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5652 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5653 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
5654 IEM_MC_ADVANCE_RIP_AND_FINISH();
5655 IEM_MC_END();
5656 break;
5657
5658 case IEMMODE_64BIT:
5659 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
5660 IEM_MC_LOCAL(uint64_t, u64Value);
5661 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5662 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5664 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5665 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
5666 IEM_MC_ADVANCE_RIP_AND_FINISH();
5667 IEM_MC_END();
5668 break;
5669
5670 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5671 }
5672 }
5673}
5674
5675
5676/**
5677 * opcode 0x63
5678 * @todo Table fixme
5679 */
5680FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
5681{
5682 if (!IEM_IS_64BIT_CODE(pVCpu))
5683 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
5684 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
5685 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
5686 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
5687}
5688
5689
5690/**
5691 * @opcode 0x8c
5692 */
5693FNIEMOP_DEF(iemOp_mov_Ev_Sw)
5694{
5695 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
5696
5697 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5698
5699 /*
5700 * Check that the destination register exists. The REX.R prefix is ignored.
5701 */
5702 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5703 if (iSegReg > X86_SREG_GS)
5704 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5705
5706 /*
5707 * If rm is denoting a register, no more instruction bytes.
5708 * In that case, the operand size is respected and the upper bits are
5709 * cleared (starting with some pentium).
5710 */
5711 if (IEM_IS_MODRM_REG_MODE(bRm))
5712 {
5713 switch (pVCpu->iem.s.enmEffOpSize)
5714 {
5715 case IEMMODE_16BIT:
5716 IEM_MC_BEGIN(0, 1, 0, 0);
5717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5718 IEM_MC_LOCAL(uint16_t, u16Value);
5719 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
5720 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
5721 IEM_MC_ADVANCE_RIP_AND_FINISH();
5722 IEM_MC_END();
5723 break;
5724
5725 case IEMMODE_32BIT:
5726 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
5727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5728 IEM_MC_LOCAL(uint32_t, u32Value);
5729 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
5730 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
5731 IEM_MC_ADVANCE_RIP_AND_FINISH();
5732 IEM_MC_END();
5733 break;
5734
5735 case IEMMODE_64BIT:
5736 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5738 IEM_MC_LOCAL(uint64_t, u64Value);
5739 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
5740 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
5741 IEM_MC_ADVANCE_RIP_AND_FINISH();
5742 IEM_MC_END();
5743 break;
5744
5745 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5746 }
5747 }
5748 else
5749 {
5750 /*
5751 * We're saving the register to memory. The access is word sized
5752 * regardless of operand size prefixes.
5753 */
5754#if 0 /* not necessary */
5755 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5756#endif
5757 IEM_MC_BEGIN(0, 2, 0, 0);
5758 IEM_MC_LOCAL(uint16_t, u16Value);
5759 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5760 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5762 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
5763 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
5764 IEM_MC_ADVANCE_RIP_AND_FINISH();
5765 IEM_MC_END();
5766 }
5767}
5768
5769
5770
5771
5772/**
5773 * @opcode 0x8d
5774 */
5775FNIEMOP_DEF(iemOp_lea_Gv_M)
5776{
5777 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
5778 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5779 if (IEM_IS_MODRM_REG_MODE(bRm))
5780 IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */
5781
5782 switch (pVCpu->iem.s.enmEffOpSize)
5783 {
5784 case IEMMODE_16BIT:
5785 IEM_MC_BEGIN(0, 2, 0, 0);
5786 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5789 /** @todo optimize: This value casting/masking can be skipped if addr-size ==
5790 * operand-size, which is usually the case. It'll save an instruction
5791 * and a register. */
5792 IEM_MC_LOCAL(uint16_t, u16Cast);
5793 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
5794 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
5795 IEM_MC_ADVANCE_RIP_AND_FINISH();
5796 IEM_MC_END();
5797 break;
5798
5799 case IEMMODE_32BIT:
5800 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5801 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5802 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5804 /** @todo optimize: This value casting/masking can be skipped if addr-size ==
5805 * operand-size, which is usually the case. It'll save an instruction
5806 * and a register. */
5807 IEM_MC_LOCAL(uint32_t, u32Cast);
5808 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
5809 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
5810 IEM_MC_ADVANCE_RIP_AND_FINISH();
5811 IEM_MC_END();
5812 break;
5813
5814 case IEMMODE_64BIT:
5815 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5816 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5817 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5819 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
5820 IEM_MC_ADVANCE_RIP_AND_FINISH();
5821 IEM_MC_END();
5822 break;
5823
5824 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5825 }
5826}
5827
5828
5829/**
5830 * @opcode 0x8e
5831 */
5832FNIEMOP_DEF(iemOp_mov_Sw_Ev)
5833{
5834 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
5835
5836 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5837
5838 /*
5839 * The practical operand size is 16-bit.
5840 */
5841#if 0 /* not necessary */
5842 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5843#endif
5844
5845 /*
5846 * Check that the destination register exists and can be used with this
5847 * instruction. The REX.R prefix is ignored.
5848 */
5849 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5850 /** @todo r=bird: What does 8086 do here wrt CS? */
5851 if ( iSegReg == X86_SREG_CS
5852 || iSegReg > X86_SREG_GS)
5853 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5854
5855 /*
5856 * If rm is denoting a register, no more instruction bytes.
5857 *
5858 * Note! Using IEMOP_MOV_SW_EV_REG_BODY here to specify different
5859 * IEM_CIMPL_F_XXX values depending on the CPU mode and target
5860 * register. This is a restriction of the current recompiler
5861 * approach.
5862 */
5863 if (IEM_IS_MODRM_REG_MODE(bRm))
5864 {
5865#define IEMOP_MOV_SW_EV_REG_BODY(a_fCImplFlags) \
5866 IEM_MC_BEGIN(2, 0, 0, a_fCImplFlags); \
5867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5868 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
5869 IEM_MC_ARG(uint16_t, u16Value, 1); \
5870 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5871 IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
5872 RT_BIT_64(kIemNativeGstReg_SegSelFirst + iSegReg) \
5873 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + iSegReg) \
5874 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + iSegReg) \
5875 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
5876 iemCImpl_load_SReg, iSRegArg, u16Value); \
5877 IEM_MC_END()
5878
5879 if (iSegReg == X86_SREG_SS)
5880 {
5881 if (IEM_IS_32BIT_CODE(pVCpu))
5882 {
5883 IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
5884 }
5885 else
5886 {
5887 IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
5888 }
5889 }
5890 else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
5891 {
5892 IEMOP_MOV_SW_EV_REG_BODY(0);
5893 }
5894 else
5895 {
5896 IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_MODE);
5897 }
5898#undef IEMOP_MOV_SW_EV_REG_BODY
5899 }
5900 else
5901 {
5902 /*
5903 * We're loading the register from memory. The access is word sized
5904 * regardless of operand size prefixes.
5905 */
5906#define IEMOP_MOV_SW_EV_MEM_BODY(a_fCImplFlags) \
5907 IEM_MC_BEGIN(2, 1, 0, a_fCImplFlags); \
5908 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
5909 IEM_MC_ARG(uint16_t, u16Value, 1); \
5910 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5911 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5913 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5914 IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
5915 RT_BIT_64(kIemNativeGstReg_SegSelFirst + iSegReg) \
5916 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + iSegReg) \
5917 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + iSegReg) \
5918 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
5919 iemCImpl_load_SReg, iSRegArg, u16Value); \
5920 IEM_MC_END()
5921
5922 if (iSegReg == X86_SREG_SS)
5923 {
5924 if (IEM_IS_32BIT_CODE(pVCpu))
5925 {
5926 IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
5927 }
5928 else
5929 {
5930 IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
5931 }
5932 }
5933 else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
5934 {
5935 IEMOP_MOV_SW_EV_MEM_BODY(0);
5936 }
5937 else
5938 {
5939 IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_MODE);
5940 }
5941#undef IEMOP_MOV_SW_EV_MEM_BODY
5942 }
5943}
5944
5945
/** Opcode 0x8f /0. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    /* NOTE(review): the (2/4/8 << 8) argument to IEM_MC_CALC_RM_EFF_ADDR
       presumably encodes the operand-size-in-bytes RSP adjustment applied
       before the EA calculation (matches the Intel note above) -- confirm
       against the IEM_MC_CALC_RM_EFF_ADDR implementation. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEM_MC_ARG(RTGCPTR,        GCPtrEffDst,                             1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t,  iEffSeg,     /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEM_MC_ARG(RTGCPTR,        GCPtrEffDst,                             1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t,  iEffSeg,     /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR,        GCPtrEffDst,                             1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t,  iEffSeg,     /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
    /* Disabled alternative: plain interpreter-only implementation kept for
       reference; pops via a temporary RSP copy and only commits on success. */
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR         GCPtrEff;
    VBOXSTRICTRC    rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    /* Only commit RSP once both the pop and the memory store succeeded. */
    if (rcStrict == VINF_SUCCESS)
    {
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
6077
6078
/**
 * @opcode 0x8f
 */
FNIEMOP_DEF(iemOp_Grp1A__xop)
{
    /*
     * AMD has defined /1 thru /7 as XOP prefix.  The prefix is similar to the
     * three byte VEX prefix, except that the mmmmm field cannot have the values
     * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    IEMOP_MNEMONIC(xop, "xop");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
    {
        /** @todo Test when exctly the XOP conformance checks kick in during
         * instruction decoding and fetching (using \#PF). */
        uint8_t bXop2;   IEM_OPCODE_GET_NEXT_U8(&bXop2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        /* XOP is invalid when combined with operand size, rep/repne, lock or
           REX prefixes - check that none of them preceded us. */
        if (   (  pVCpu->iem.s.fPrefixes
                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0)
        {
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
            if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* R, X, B and vvvv are stored inverted in the prefix bytes, hence
               the bitwise NOT before shifting the bits into place. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bXop2 & 0x3;

            /** @todo XOP: Just use new tables and decoders. */
            switch (bRm & 0x1f) /* mmmmm field selects the opcode map. */
            {
                case 8: /* xop opcode map 8. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 9: /* xop opcode map 9. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 10: /* xop opcode map 10. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default:
                    Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        else
            Log(("XOP: Invalid prefix mix!\n"));
    }
    else
        Log(("XOP: XOP support disabled!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
6141
6142
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Exchanges the general register selected by @a iReg (after REX.B extension)
 * with xAX at the current effective operand size.  Never raises and does not
 * use a locked bus cycle (register-only exchange).
 *
 * @param   iReg    The low 3 bits of the register index (opcode & 7);
 *                  REX.B is OR'ed in below.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    iReg |= pVCpu->iem.s.uRexB; /* Apply REX.B to get the full register index. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg,         u16Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg,         u32Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg,         u64Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6193
6194
/**
 * @opcode 0x90
 *
 * Plain NOP, but also: 'xchg r8,rAX' when REX.B is present, and PAUSE when
 * prefixed with F3 (which sets IEM_OP_PRF_LOCK here per the check below).
 */
FNIEMOP_DEF(iemOp_nop)
{
    /* R8/R8D and RAX/EAX can be exchanged. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
    {
        IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
        return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        IEMOP_MNEMONIC(pause, "pause");
        /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
           and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
        if (!IEM_IS_IN_GUEST(pVCpu))
        { /* probable */ }
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
        else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmx_pause);
#endif
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
        else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_svm_pause);
#endif
    }
    else
        IEMOP_MNEMONIC(nop, "nop");
    /** @todo testcase: lock nop; lock pause */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6231
6232
/**
 * @opcode 0x91
 *
 * xchg rCX,rAX - delegates to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
6241
6242
/**
 * @opcode 0x92
 *
 * xchg rDX,rAX - delegates to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
6251
6252
/**
 * @opcode 0x93
 *
 * xchg rBX,rAX - delegates to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
6261
6262
6263/**
6264 * @opcode 0x94
6265 */
6266FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6267{
6268 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6269 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6270}
6271
6272
/**
 * @opcode 0x95
 *
 * xchg rBP,rAX - delegates to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
6281
6282
/**
 * @opcode 0x96
 *
 * xchg rSI,rAX - delegates to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
6291
6292
/**
 * @opcode 0x97
 *
 * xchg rDI,rAX - delegates to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
6301
6302
/**
 * @opcode 0x98
 *
 * CBW / CWDE / CDQE: sign-extend AL->AX, AX->EAX or EAX->RAX depending on
 * the effective operand size.  Implemented by testing the top source bit and
 * either OR'ing in or AND'ing out the upper half of xAX.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {  /* AL sign bit */
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) { /* AX sign bit */
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) { /* EAX sign bit */
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6352
6353
/**
 * @opcode 0x99
 *
 * CWD / CDQ / CQO: fill xDX with the sign of xAX at the effective operand
 * size (all-ones if the source sign bit is set, zero otherwise).
 */
FNIEMOP_DEF(iemOp_cwd)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) { /* AX sign bit */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) { /* EAX sign bit */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) { /* RAX sign bit */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6403
6404
/**
 * @opcode 0x9a
 *
 * Far call with an immediate seg:off pointer.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    /* Offset is 16 or 32 bits wide depending on operand size; the selector
       always follows as a 16-bit word. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
                                iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
6426
6427
/** Opcode 0x9b. (aka fwait)
 *
 * Checks for pending FPU exceptions / device-not-available conditions and
 * raises them if needed; otherwise a no-op. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6439
6440
/**
 * @opcode 0x9c
 *
 * pushf - deferred to the C implementation; modifies rSP and may VM-exit.
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
                                iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
6452
6453
/**
 * @opcode 0x9d
 *
 * popf - deferred to the C implementation; modifies rSP and RFLAGS, may
 * VM-exit, and needs IRQ checks before and after (IF may change).
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_MNEMONIC(popf_Fv, "popf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
                                iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
6466
6467
/**
 * @opcode 0x9e
 * @opflmodify cf,pf,af,zf,sf
 *
 * sahf - store AH into the low byte of EFLAGS (SF/ZF/AF/PF/CF).  In 64-bit
 * mode it is only valid when the CPU reports LAHF/SAHF support.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Mask AH down to the status flags, clear the low byte of EFLAGS, then
       merge - with bit 1 (X86_EFL_1) forced set as the architecture requires. */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6492
6493
/**
 * @opcode 0x9f
 * @opfltest cf,pf,af,zf,sf
 *
 * lahf - load the low byte of EFLAGS into AH.  In 64-bit mode it is only
 * valid when the CPU reports LAHF/SAHF support.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6512
6513
/**
 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
 * Will return/throw on failures.
 *
 * The immediate is 16, 32 or 64 bits wide depending on the effective address
 * mode and is zero-extended to 64 bits in the first two cases.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } while (0)
6537
/**
 * @opcode 0xa0
 *
 * mov AL,Ob - load AL from an absolute moffs address in the effective segment.
 */
FNIEMOP_DEF(iemOp_mov_AL_Ob)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6562
6563
/**
 * @opcode 0xa1
 *
 * mov rAX,Ov - load xAX (16/32/64-bit) from an absolute moffs address in the
 * effective segment.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6617
6618
/**
 * @opcode 0xa2
 *
 * mov Ob,AL - store AL to an absolute moffs address in the effective segment.
 */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
    IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6643
6644
/**
 * @opcode 0xa3
 *
 * mov Ov,rAX - store xAX (16/32/64-bit) to an absolute moffs address in the
 * effective segment.
 */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Store rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6698
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits the MC block for a single (non-REP) MOVS step: load from
 * iEffSeg:[xSI], store to ES:[xDI], then advance or retreat both index
 * registers by the element size depending on EFLAGS.DF.
 *
 * @param ValBits    Element width in bits (8/16/32/64).
 * @param AddrBits   Effective address width in bits (16/32/64).
 * @param a_fMcFlags IEM_MC_F_XXX flags for IEM_MC_BEGIN.
 */
#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6718
/**
 * @opcode 0xa4
 * @opfltest df
 *
 * movsb - byte string move.  REP/REPNE variants are deferred to the C
 * implementation (which also tracks xCX); the plain form uses the shared
 * IEM_MOVS_CASE MC block.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6768
6769
/**
 * @opcode 0xa5
 * @opfltest df
 *
 * movsw/movsd/movsq - word/dword/qword string move.  REP variants defer to
 * the C implementation per operand-size x address-size combination; the
 * plain form uses the shared IEM_MOVS_CASE MC block.
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* NOTE(review): no 'break' here unlike the 16-bit case above;
                   harmless since every inner case returns, but inconsistent. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6894
6895#undef IEM_MOVS_CASE
6896
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits the MC block for a single (non-REP) CMPS step: fetch from
 * iEffSeg:[xSI] and ES:[xDI], run the cmp AIMPL to set EFLAGS, then advance
 * or retreat both index registers by the element size depending on EFLAGS.DF.
 *
 * @param ValBits    Element width in bits (8/16/32/64).
 * @param AddrBits   Effective address width in bits (16/32/64).
 * @param a_fMcFlags IEM_MC_F_XXX flags for IEM_MC_BEGIN.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(3, 3, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        \
        IEM_MC_LOCAL(RTGCPTR, uAddr1); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr1, X86_GREG_xSI); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr1); \
        \
        IEM_MC_LOCAL(RTGCPTR, uAddr2); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr2, X86_GREG_xDI); \
        IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr2); \
        \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_ARG_LOCAL_REF(uint##ValBits##_t *, puValue1, uValue1, 0); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6926
6927/**
6928 * @opcode 0xa6
6929 * @opflclass arithmetic
6930 * @opfltest df
6931 */
6932FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
6933{
6934
6935 /*
6936 * Use the C implementation if a repeat prefix is encountered.
6937 */
6938 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6939 {
6940 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
6941 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6942 switch (pVCpu->iem.s.enmEffAddrMode)
6943 {
6944 case IEMMODE_16BIT:
6945 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
6946 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6947 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6948 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6949 iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6950 case IEMMODE_32BIT:
6951 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
6952 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6953 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6954 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6955 iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6956 case IEMMODE_64BIT:
6957 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
6958 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6959 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6960 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6961 iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6962 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6963 }
6964 }
6965 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6966 {
6967 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
6968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6969 switch (pVCpu->iem.s.enmEffAddrMode)
6970 {
6971 case IEMMODE_16BIT:
6972 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
6973 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6974 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6975 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6976 iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6977 case IEMMODE_32BIT:
6978 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
6979 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6980 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6981 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6982 iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6983 case IEMMODE_64BIT:
6984 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
6985 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6986 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6987 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6988 iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6989 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6990 }
6991 }
6992
6993 /*
6994 * Sharing case implementation with cmps[wdq] below.
6995 */
6996 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
6997 switch (pVCpu->iem.s.enmEffAddrMode)
6998 {
6999 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7000 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7001 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
7002 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7003 }
7004}
7005
7006
7007/**
7008 * @opcode 0xa7
7009 * @opflclass arithmetic
7010 * @opfltest df
7011 */
7012FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
7013{
7014 /*
7015 * Use the C implementation if a repeat prefix is encountered.
7016 */
7017 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7018 {
7019 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
7020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7021 switch (pVCpu->iem.s.enmEffOpSize)
7022 {
7023 case IEMMODE_16BIT:
7024 switch (pVCpu->iem.s.enmEffAddrMode)
7025 {
7026 case IEMMODE_16BIT:
7027 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7028 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7029 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7030 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7031 iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7032 case IEMMODE_32BIT:
7033 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7034 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7035 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7036 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7037 iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7038 case IEMMODE_64BIT:
7039 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7040 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7041 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7042 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7043 iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7044 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7045 }
7046 break;
7047 case IEMMODE_32BIT:
7048 switch (pVCpu->iem.s.enmEffAddrMode)
7049 {
7050 case IEMMODE_16BIT:
7051 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7052 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7053 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7054 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7055 iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7056 case IEMMODE_32BIT:
7057 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7058 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7059 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7060 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7061 iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7062 case IEMMODE_64BIT:
7063 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7064 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7065 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7066 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7067 iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7068 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7069 }
7070 case IEMMODE_64BIT:
7071 switch (pVCpu->iem.s.enmEffAddrMode)
7072 {
7073 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
7074 case IEMMODE_32BIT:
7075 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7076 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7077 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7078 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7079 iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7080 case IEMMODE_64BIT:
7081 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7082 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7083 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7084 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7085 iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7086 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7087 }
7088 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7089 }
7090 }
7091
7092 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7093 {
7094 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
7095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7096 switch (pVCpu->iem.s.enmEffOpSize)
7097 {
7098 case IEMMODE_16BIT:
7099 switch (pVCpu->iem.s.enmEffAddrMode)
7100 {
7101 case IEMMODE_16BIT:
7102 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7103 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7104 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7105 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7106 iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7107 case IEMMODE_32BIT:
7108 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7109 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7110 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7111 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7112 iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7113 case IEMMODE_64BIT:
7114 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7115 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7116 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7117 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7118 iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7119 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7120 }
7121 break;
7122 case IEMMODE_32BIT:
7123 switch (pVCpu->iem.s.enmEffAddrMode)
7124 {
7125 case IEMMODE_16BIT:
7126 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7127 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7128 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7129 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7130 iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7131 case IEMMODE_32BIT:
7132 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7133 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7134 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7135 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7136 iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7137 case IEMMODE_64BIT:
7138 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7139 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7140 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7141 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7142 iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7143 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7144 }
7145 case IEMMODE_64BIT:
7146 switch (pVCpu->iem.s.enmEffAddrMode)
7147 {
7148 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
7149 case IEMMODE_32BIT:
7150 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7151 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7152 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7153 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7154 iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7155 case IEMMODE_64BIT:
7156 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7157 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7158 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7159 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7160 iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7161 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7162 }
7163 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7164 }
7165 }
7166
7167 /*
7168 * Annoying double switch here.
7169 * Using ugly macro for implementing the cases, sharing it with cmpsb.
7170 */
7171 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
7172 switch (pVCpu->iem.s.enmEffOpSize)
7173 {
7174 case IEMMODE_16BIT:
7175 switch (pVCpu->iem.s.enmEffAddrMode)
7176 {
7177 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7178 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7179 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
7180 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7181 }
7182 break;
7183
7184 case IEMMODE_32BIT:
7185 switch (pVCpu->iem.s.enmEffAddrMode)
7186 {
7187 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7188 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7189 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
7190 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7191 }
7192 break;
7193
7194 case IEMMODE_64BIT:
7195 switch (pVCpu->iem.s.enmEffAddrMode)
7196 {
7197 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7198 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_MIN_386); break;
7199 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
7200 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7201 }
7202 break;
7203 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7204 }
7205}
7206
7207#undef IEM_CMPS_CASE
7208
7209/**
7210 * @opcode 0xa8
7211 * @opflclass logical
7212 */
7213FNIEMOP_DEF(iemOp_test_AL_Ib)
7214{
7215 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
7216 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7217 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
7218}
7219
7220
7221/**
7222 * @opcode 0xa9
7223 * @opflclass logical
7224 */
7225FNIEMOP_DEF(iemOp_test_eAX_Iz)
7226{
7227 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
7228 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7229 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
7230}
7231
7232
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX for the
 * non-repeated STOS forms: stores xAX (of the given width) to ES:[xDI] and
 * then advances or retreats xDI by the element size according to EFLAGS.DF. */
#define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7249
7250/**
7251 * @opcode 0xaa
7252 */
7253FNIEMOP_DEF(iemOp_stosb_Yb_AL)
7254{
7255 /*
7256 * Use the C implementation if a repeat prefix is encountered.
7257 */
7258 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7259 {
7260 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
7261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7262 switch (pVCpu->iem.s.enmEffAddrMode)
7263 {
7264 case IEMMODE_16BIT:
7265 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7266 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7267 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7268 iemCImpl_stos_al_m16);
7269 case IEMMODE_32BIT:
7270 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7271 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7272 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7273 iemCImpl_stos_al_m32);
7274 case IEMMODE_64BIT:
7275 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7276 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7277 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7278 iemCImpl_stos_al_m64);
7279 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7280 }
7281 }
7282
7283 /*
7284 * Sharing case implementation with stos[wdq] below.
7285 */
7286 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
7287 switch (pVCpu->iem.s.enmEffAddrMode)
7288 {
7289 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7290 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7291 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
7292 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7293 }
7294}
7295
7296
7297/**
7298 * @opcode 0xab
7299 */
7300FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
7301{
7302 /*
7303 * Use the C implementation if a repeat prefix is encountered.
7304 */
7305 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7306 {
7307 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
7308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7309 switch (pVCpu->iem.s.enmEffOpSize)
7310 {
7311 case IEMMODE_16BIT:
7312 switch (pVCpu->iem.s.enmEffAddrMode)
7313 {
7314 case IEMMODE_16BIT:
7315 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7316 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7317 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7318 iemCImpl_stos_ax_m16);
7319 case IEMMODE_32BIT:
7320 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7321 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7322 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7323 iemCImpl_stos_ax_m32);
7324 case IEMMODE_64BIT:
7325 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7326 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7327 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7328 iemCImpl_stos_ax_m64);
7329 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7330 }
7331 break;
7332 case IEMMODE_32BIT:
7333 switch (pVCpu->iem.s.enmEffAddrMode)
7334 {
7335 case IEMMODE_16BIT:
7336 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7337 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7338 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7339 iemCImpl_stos_eax_m16);
7340 case IEMMODE_32BIT:
7341 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7342 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7343 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7344 iemCImpl_stos_eax_m32);
7345 case IEMMODE_64BIT:
7346 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7347 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7348 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7349 iemCImpl_stos_eax_m64);
7350 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7351 }
7352 case IEMMODE_64BIT:
7353 switch (pVCpu->iem.s.enmEffAddrMode)
7354 {
7355 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
7356 case IEMMODE_32BIT:
7357 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7358 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7359 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7360 iemCImpl_stos_rax_m32);
7361 case IEMMODE_64BIT:
7362 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7363 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7364 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7365 iemCImpl_stos_rax_m64);
7366 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7367 }
7368 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7369 }
7370 }
7371
7372 /*
7373 * Annoying double switch here.
7374 * Using ugly macro for implementing the cases, sharing it with stosb.
7375 */
7376 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7377 switch (pVCpu->iem.s.enmEffOpSize)
7378 {
7379 case IEMMODE_16BIT:
7380 switch (pVCpu->iem.s.enmEffAddrMode)
7381 {
7382 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7383 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7384 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
7385 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7386 }
7387 break;
7388
7389 case IEMMODE_32BIT:
7390 switch (pVCpu->iem.s.enmEffAddrMode)
7391 {
7392 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7393 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7394 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
7395 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7396 }
7397 break;
7398
7399 case IEMMODE_64BIT:
7400 switch (pVCpu->iem.s.enmEffAddrMode)
7401 {
7402 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7403 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
7404 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
7405 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7406 }
7407 break;
7408 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7409 }
7410}
7411
7412#undef IEM_STOS_CASE
7413
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv for the
 * non-repeated LODS forms: loads one element from iEffSeg:[xSI] into
 * xAX (of the given width) and then advances or retreats xSI by the element
 * size according to EFLAGS.DF. */
#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7430
7431/**
7432 * @opcode 0xac
7433 * @opfltest df
7434 */
7435FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
7436{
7437 /*
7438 * Use the C implementation if a repeat prefix is encountered.
7439 */
7440 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7441 {
7442 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
7443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7444 switch (pVCpu->iem.s.enmEffAddrMode)
7445 {
7446 case IEMMODE_16BIT:
7447 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7448 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7449 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7450 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7451 iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
7452 case IEMMODE_32BIT:
7453 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7454 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7455 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7456 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7457 iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
7458 case IEMMODE_64BIT:
7459 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7460 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7461 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7462 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7463 iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
7464 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7465 }
7466 }
7467
7468 /*
7469 * Sharing case implementation with stos[wdq] below.
7470 */
7471 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
7472 switch (pVCpu->iem.s.enmEffAddrMode)
7473 {
7474 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7475 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7476 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
7477 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7478 }
7479}
7480
7481
7482/**
7483 * @opcode 0xad
7484 * @opfltest df
7485 */
7486FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
7487{
7488 /*
7489 * Use the C implementation if a repeat prefix is encountered.
7490 */
7491 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7492 {
7493 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
7494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7495 switch (pVCpu->iem.s.enmEffOpSize)
7496 {
7497 case IEMMODE_16BIT:
7498 switch (pVCpu->iem.s.enmEffAddrMode)
7499 {
7500 case IEMMODE_16BIT:
7501 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7502 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7503 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7504 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7505 iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
7506 case IEMMODE_32BIT:
7507 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7508 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7509 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7510 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7511 iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
7512 case IEMMODE_64BIT:
7513 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7514 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7515 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7516 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7517 iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
7518 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7519 }
7520 break;
7521 case IEMMODE_32BIT:
7522 switch (pVCpu->iem.s.enmEffAddrMode)
7523 {
7524 case IEMMODE_16BIT:
7525 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7526 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7527 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7528 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7529 iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
7530 case IEMMODE_32BIT:
7531 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7532 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7533 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7534 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7535 iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
7536 case IEMMODE_64BIT:
7537 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7538 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7539 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7540 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7541 iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
7542 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7543 }
7544 case IEMMODE_64BIT:
7545 switch (pVCpu->iem.s.enmEffAddrMode)
7546 {
7547 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
7548 case IEMMODE_32BIT:
7549 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7550 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7551 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7552 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7553 iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
7554 case IEMMODE_64BIT:
7555 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7556 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7557 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7558 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7559 iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
7560 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7561 }
7562 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7563 }
7564 }
7565
7566 /*
7567 * Annoying double switch here.
7568 * Using ugly macro for implementing the cases, sharing it with lodsb.
7569 */
7570 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
7571 switch (pVCpu->iem.s.enmEffOpSize)
7572 {
7573 case IEMMODE_16BIT:
7574 switch (pVCpu->iem.s.enmEffAddrMode)
7575 {
7576 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7577 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7578 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
7579 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7580 }
7581 break;
7582
7583 case IEMMODE_32BIT:
7584 switch (pVCpu->iem.s.enmEffAddrMode)
7585 {
7586 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7587 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7588 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
7589 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7590 }
7591 break;
7592
7593 case IEMMODE_64BIT:
7594 switch (pVCpu->iem.s.enmEffAddrMode)
7595 {
7596 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7597 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
7598 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
7599 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7600 }
7601 break;
7602 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7603 }
7604}
7605
7606#undef IEM_LODS_CASE
7607
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv for the
 * non-repeated SCAS forms: compares xAX (of the given width) against the
 * element at ES:[xDI] via the iemAImpl_cmp_uNN worker (updating EFLAGS), then
 * advances or retreats xDI by the element size according to EFLAGS.DF. */
#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
7630
7631/**
7632 * @opcode 0xae
7633 * @opflclass arithmetic
7634 * @opfltest df
7635 */
7636FNIEMOP_DEF(iemOp_scasb_AL_Xb)
7637{
7638 /*
7639 * Use the C implementation if a repeat prefix is encountered.
7640 */
7641 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7642 {
7643 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
7644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7645 switch (pVCpu->iem.s.enmEffAddrMode)
7646 {
7647 case IEMMODE_16BIT:
7648 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7649 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7650 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7651 iemCImpl_repe_scas_al_m16);
7652 case IEMMODE_32BIT:
7653 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7654 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7655 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7656 iemCImpl_repe_scas_al_m32);
7657 case IEMMODE_64BIT:
7658 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7659 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7660 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7661 iemCImpl_repe_scas_al_m64);
7662 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7663 }
7664 }
7665 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7666 {
7667 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
7668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7669 switch (pVCpu->iem.s.enmEffAddrMode)
7670 {
7671 case IEMMODE_16BIT:
7672 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7673 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7674 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7675 iemCImpl_repne_scas_al_m16);
7676 case IEMMODE_32BIT:
7677 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7678 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7679 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7680 iemCImpl_repne_scas_al_m32);
7681 case IEMMODE_64BIT:
7682 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7683 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7684 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7685 iemCImpl_repne_scas_al_m64);
7686 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7687 }
7688 }
7689
7690 /*
7691 * Sharing case implementation with stos[wdq] below.
7692 */
7693 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
7694 switch (pVCpu->iem.s.enmEffAddrMode)
7695 {
7696 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7697 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7698 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
7699 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7700 }
7701}
7702
7703
7704/**
7705 * @opcode 0xaf
7706 * @opflclass arithmetic
7707 * @opfltest df
7708 */
7709FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
7710{
7711 /*
7712 * Use the C implementation if a repeat prefix is encountered.
7713 */
7714 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7715 {
7716 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
7717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7718 switch (pVCpu->iem.s.enmEffOpSize)
7719 {
7720 case IEMMODE_16BIT:
7721 switch (pVCpu->iem.s.enmEffAddrMode)
7722 {
7723 case IEMMODE_16BIT:
7724 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7725 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7726 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7727 iemCImpl_repe_scas_ax_m16);
7728 case IEMMODE_32BIT:
7729 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7730 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7731 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7732 iemCImpl_repe_scas_ax_m32);
7733 case IEMMODE_64BIT:
7734 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7735 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7736 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7737 iemCImpl_repe_scas_ax_m64);
7738 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7739 }
7740 break;
7741 case IEMMODE_32BIT:
7742 switch (pVCpu->iem.s.enmEffAddrMode)
7743 {
7744 case IEMMODE_16BIT:
7745 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7746 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7747 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7748 iemCImpl_repe_scas_eax_m16);
7749 case IEMMODE_32BIT:
7750 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7751 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7752 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7753 iemCImpl_repe_scas_eax_m32);
7754 case IEMMODE_64BIT:
7755 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7756 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7757 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7758 iemCImpl_repe_scas_eax_m64);
7759 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7760 }
7761 case IEMMODE_64BIT:
7762 switch (pVCpu->iem.s.enmEffAddrMode)
7763 {
7764 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
7765 case IEMMODE_32BIT:
7766 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7767 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7768 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7769 iemCImpl_repe_scas_rax_m32);
7770 case IEMMODE_64BIT:
7771 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7772 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7773 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7774 iemCImpl_repe_scas_rax_m64);
7775 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7776 }
7777 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7778 }
7779 }
7780 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7781 {
7782 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
7783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7784 switch (pVCpu->iem.s.enmEffOpSize)
7785 {
7786 case IEMMODE_16BIT:
7787 switch (pVCpu->iem.s.enmEffAddrMode)
7788 {
7789 case IEMMODE_16BIT:
7790 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7791 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7792 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7793 iemCImpl_repne_scas_ax_m16);
7794 case IEMMODE_32BIT:
7795 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7796 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7797 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7798 iemCImpl_repne_scas_ax_m32);
7799 case IEMMODE_64BIT:
7800 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7801 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7802 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7803 iemCImpl_repne_scas_ax_m64);
7804 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7805 }
7806 break;
7807 case IEMMODE_32BIT:
7808 switch (pVCpu->iem.s.enmEffAddrMode)
7809 {
7810 case IEMMODE_16BIT:
7811 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7812 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7813 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7814 iemCImpl_repne_scas_eax_m16);
7815 case IEMMODE_32BIT:
7816 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7817 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7818 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7819 iemCImpl_repne_scas_eax_m32);
7820 case IEMMODE_64BIT:
7821 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7822 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7823 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7824 iemCImpl_repne_scas_eax_m64);
7825 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7826 }
7827 case IEMMODE_64BIT:
7828 switch (pVCpu->iem.s.enmEffAddrMode)
7829 {
7830 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
7831 case IEMMODE_32BIT:
7832 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7833 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7834 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7835 iemCImpl_repne_scas_rax_m32);
7836 case IEMMODE_64BIT:
7837 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7838 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7839 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7840 iemCImpl_repne_scas_rax_m64);
7841 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7842 }
7843 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7844 }
7845 }
7846
7847 /*
7848 * Annoying double switch here.
7849 * Using ugly macro for implementing the cases, sharing it with scasb.
7850 */
7851 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
7852 switch (pVCpu->iem.s.enmEffOpSize)
7853 {
7854 case IEMMODE_16BIT:
7855 switch (pVCpu->iem.s.enmEffAddrMode)
7856 {
7857 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7858 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7859 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
7860 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7861 }
7862 break;
7863
7864 case IEMMODE_32BIT:
7865 switch (pVCpu->iem.s.enmEffAddrMode)
7866 {
7867 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7868 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7869 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
7870 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7871 }
7872 break;
7873
7874 case IEMMODE_64BIT:
7875 switch (pVCpu->iem.s.enmEffAddrMode)
7876 {
7877 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7878 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
7879 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
7880 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7881 }
7882 break;
7883 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7884 }
7885}
7886
7887#undef IEM_SCAS_CASE
7888
7889/**
7890 * Common 'mov r8, imm8' helper.
 *
 * Fetches the trailing imm8 byte and stores it unmodified into the 8-bit
 * register given by @a iFixedReg (an X86_GREG_XXX index, already combined
 * with REX.B by the caller).  No EFLAGS are touched.
7891 */
7892FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
7893{
7894 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7895 IEM_MC_BEGIN(0, 0, 0, 0);
7896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7897 IEM_MC_STORE_GREG_U8_CONST(iFixedReg, u8Imm);
7898 IEM_MC_ADVANCE_RIP_AND_FINISH();
7899 IEM_MC_END();
7900}
7901
7902
7903/**
7904 * @opcode 0xb0
 * @remarks mov AL,Ib; with a REX.B prefix the destination becomes R8L,
 *          hence the uRexB OR below.
7905 */
7906FNIEMOP_DEF(iemOp_mov_AL_Ib)
7907{
7908 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
7909 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7910}
7911
7912
7913/**
7914 * @opcode 0xb1
 * @remarks mov CL,Ib; R9L with REX.B.
7915 */
7916FNIEMOP_DEF(iemOp_CL_Ib)
7917{
7918 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
7919 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7920}
7921
7922
7923/**
7924 * @opcode 0xb2
 * @remarks mov DL,Ib; R10L with REX.B.
7925 */
7926FNIEMOP_DEF(iemOp_DL_Ib)
7927{
7928 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
7929 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7930}
7931
7932
7933/**
7934 * @opcode 0xb3
 * @remarks mov BL,Ib; R11L with REX.B.
7935 */
7936FNIEMOP_DEF(iemOp_BL_Ib)
7937{
7938 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
7939 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7940}
7941
7942
7943/**
7944 * @opcode 0xb4
 * @remarks Register index 4 (X86_GREG_xSP): presumably selects AH without any
 *          REX prefix and SPL/R12L with one, the high-byte mapping being done
 *          by the 8-bit GREG store — confirm against IEM_MC_STORE_GREG_U8_CONST.
7945 */
7946FNIEMOP_DEF(iemOp_mov_AH_Ib)
7947{
7948 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
7949 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7950}
7951
7952
7953/**
7954 * @opcode 0xb5
 * @remarks Register index 5 (X86_GREG_xBP): CH without REX, BPL/R13L with REX.
7955 */
7956FNIEMOP_DEF(iemOp_CH_Ib)
7957{
7958 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
7959 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7960}
7961
7962
7963/**
7964 * @opcode 0xb6
 * @remarks Register index 6 (X86_GREG_xSI): DH without REX, SIL/R14L with REX.
7965 */
7966FNIEMOP_DEF(iemOp_DH_Ib)
7967{
7968 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
7969 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7970}
7971
7972
7973/**
7974 * @opcode 0xb7
 * @remarks Register index 7 (X86_GREG_xDI): BH without REX, DIL/R15L with REX.
7975 */
7976FNIEMOP_DEF(iemOp_BH_Ib)
7977{
7978 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
7979 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7980}
7981
7982
7983/**
7984 * Common 'mov regX,immX' helper.
 *
 * Stores the immediate operand into the GPR given by @a iFixedReg (already
 * combined with REX.B by the caller).  The immediate width follows the
 * effective operand size: 16-bit, 32-bit, or a full 64-bit immediate in
 * 64-bit mode (the only instruction with a true imm64).  No EFLAGS change.
7985 */
7986FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
7987{
7988 switch (pVCpu->iem.s.enmEffOpSize)
7989 {
7990 case IEMMODE_16BIT:
7991 IEM_MC_BEGIN(0, 0, 0, 0);
7992 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7994 IEM_MC_STORE_GREG_U16_CONST(iFixedReg, u16Imm);
7995 IEM_MC_ADVANCE_RIP_AND_FINISH();
7996 IEM_MC_END();
7997 break;
7998
7999 case IEMMODE_32BIT:
8000 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8001 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8003 IEM_MC_STORE_GREG_U32_CONST(iFixedReg, u32Imm);
8004 IEM_MC_ADVANCE_RIP_AND_FINISH();
8005 IEM_MC_END();
8006 break;
8007
8008 case IEMMODE_64BIT:
8009 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
8010 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
8011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8012 IEM_MC_STORE_GREG_U64_CONST(iFixedReg, u64Imm);
8013 IEM_MC_ADVANCE_RIP_AND_FINISH();
8014 IEM_MC_END();
8015 break;
8016 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8017 }
8018}
8019
8020
8021/**
8022 * @opcode 0xb8
 * @remarks mov rAX,Iv; R8 with REX.B.
8023 */
8024FNIEMOP_DEF(iemOp_eAX_Iv)
8025{
8026 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
8027 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8028}
8029
8030
8031/**
8032 * @opcode 0xb9
8033 */
8034FNIEMOP_DEF(iemOp_eCX_Iv)
8035{
8036 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
8037 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8038}
8039
8040
8041/**
8042 * @opcode 0xba
 * @remarks mov rDX,Iv; R10 with REX.B.
8043 */
8044FNIEMOP_DEF(iemOp_eDX_Iv)
8045{
8046 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
8047 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8048}
8049
8050
8051/**
8052 * @opcode 0xbb
 * @remarks mov rBX,Iv; R11 with REX.B.
8053 */
8054FNIEMOP_DEF(iemOp_eBX_Iv)
8055{
8056 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
8057 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8058}
8059
8060
8061/**
8062 * @opcode 0xbc
 * @remarks mov rSP,Iv; R12 with REX.B.
8063 */
8064FNIEMOP_DEF(iemOp_eSP_Iv)
8065{
8066 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
8067 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8068}
8069
8070
8071/**
8072 * @opcode 0xbd
 * @remarks mov rBP,Iv; R13 with REX.B.
8073 */
8074FNIEMOP_DEF(iemOp_eBP_Iv)
8075{
8076 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
8077 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8078}
8079
8080
8081/**
8082 * @opcode 0xbe
 * @remarks mov rSI,Iv; R14 with REX.B.
8083 */
8084FNIEMOP_DEF(iemOp_eSI_Iv)
8085{
8086 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
8087 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8088}
8089
8090
8091/**
8092 * @opcode 0xbf
 * @remarks mov rDI,Iv; R15 with REX.B.
8093 */
8094FNIEMOP_DEF(iemOp_eDI_Iv)
8095{
8096 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
8097 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8098}
8099
8100
8101/**
8102 * @opcode 0xc0
 *
 * Group 2 byte-sized shift/rotate with an imm8 count: /0 ROL, /1 ROR,
 * /2 RCL, /3 RCR, /4 SHL, /5 SHR, /7 SAR; /6 raises \#UD.  186+ only.
 * The shared body macro maps the destination (register or memory),
 * fetches the imm8 count, and dispatches to the selected assembly worker.
8103 */
8104FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
8105{
8106 IEMOP_HLP_MIN_186();
8107 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8108
8109 /* Need to use a body macro here since the EFLAGS behaviour differs between
8110 the shifts, rotates and rotate w/ carry. Sigh. */
8111#define GRP2_BODY_Eb_Ib(a_pImplExpr) \
8112 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
8113 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8114 { \
8115 /* register */ \
8116 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8117 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0); \
8118 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8119 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
8120 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8121 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8122 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8123 IEM_MC_REF_EFLAGS(pEFlags); \
8124 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
8125 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8126 IEM_MC_END(); \
8127 } \
8128 else \
8129 { \
8130 /* memory */ \
8131 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_186, 0); \
8132 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8133 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
8134 \
8135 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8137 \
8138 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8139 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
8140 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8141 \
8142 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8143 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
8144 IEM_MC_FETCH_EFLAGS(EFlags); \
8145 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
8146 \
8147 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8148 IEM_MC_COMMIT_EFLAGS(EFlags); \
8149 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8150 IEM_MC_END(); \
8151 } (void)0
8152
8153 switch (IEM_GET_MODRM_REG_8(bRm))
8154 {
8155 /**
8156 * @opdone
8157 * @opmaps grp2_c0
8158 * @opcode /0
8159 * @opflclass rotate_count
8160 */
8161 case 0:
8162 {
8163 IEMOP_MNEMONIC2(MI, ROL, rol, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8164 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8165 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
8166 break;
8167 }
8168 /**
8169 * @opdone
8170 * @opmaps grp2_c0
8171 * @opcode /1
8172 * @opflclass rotate_count
8173 */
8174 case 1:
8175 {
8176 IEMOP_MNEMONIC2(MI, ROR, ror, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8177 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8178 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
8179 break;
8180 }
8181 /**
8182 * @opdone
8183 * @opmaps grp2_c0
8184 * @opcode /2
8185 * @opflclass rotate_carry_count
8186 */
8187 case 2:
8188 {
8189 IEMOP_MNEMONIC2(MI, RCL, rcl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8190 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8191 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
8192 break;
8193 }
8194 /**
8195 * @opdone
8196 * @opmaps grp2_c0
8197 * @opcode /3
8198 * @opflclass rotate_carry_count
8199 */
8200 case 3:
8201 {
8202 IEMOP_MNEMONIC2(MI, RCR, rcr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8203 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8204 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
8205 break;
8206 }
8207 /**
8208 * @opdone
8209 * @opmaps grp2_c0
8210 * @opcode /4
8211 * @opflclass shift_count
8212 */
8213 case 4:
8214 {
8215 IEMOP_MNEMONIC2(MI, SHL, shl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8216 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8217 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
8218 break;
8219 }
8220 /**
8221 * @opdone
8222 * @opmaps grp2_c0
8223 * @opcode /5
8224 * @opflclass shift_count
8225 */
8226 case 5:
8227 {
8228 IEMOP_MNEMONIC2(MI, SHR, shr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8229 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8230 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
8231 break;
8232 }
8233 /**
8234 * @opdone
8235 * @opmaps grp2_c0
8236 * @opcode /7
8237 * @opflclass shift_count
8238 */
8239 case 7:
8240 {
8241 IEMOP_MNEMONIC2(MI, SAR, sar, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8242 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8243 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
8244 break;
8245 }
8246
8247 /** @opdone */
8248 case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is undefined in group 2. */
8249 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8250 }
8251#undef GRP2_BODY_Eb_Ib
8252}
8253
8254
8255/**
8256 * @opcode 0xc1
 *
 * Group 2 word/dword/qword shift/rotate with an imm8 count: /0 ROL, /1 ROR,
 * /2 RCL, /3 RCR, /4 SHL, /5 SHR, /7 SAR; /6 raises \#UD.  186+ only.
 * The shared body macro handles all three operand sizes for both register
 * and memory destinations and dispatches to the selected assembly worker.
8257 */
8258FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
8259{
8260 IEMOP_HLP_MIN_186();
8261 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8262
8263 /* Need to use a body macro here since the EFLAGS behaviour differs between
8264 the shifts, rotates and rotate w/ carry. Sigh. */
8265#define GRP2_BODY_Ev_Ib(a_pImplExpr) \
8266 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
8267 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8268 { \
8269 /* register */ \
8270 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8271 switch (pVCpu->iem.s.enmEffOpSize) \
8272 { \
8273 case IEMMODE_16BIT: \
8274 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0); \
8275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8276 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
8277 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8278 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8279 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8280 IEM_MC_REF_EFLAGS(pEFlags); \
8281 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
8282 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8283 IEM_MC_END(); \
8284 break; \
8285 \
8286 case IEMMODE_32BIT: \
8287 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
8288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8289 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
8290 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8291 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8292 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8293 IEM_MC_REF_EFLAGS(pEFlags); \
8294 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
8295 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); /* 32-bit result zero-extends into the full GPR. */ \
8296 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8297 IEM_MC_END(); \
8298 break; \
8299 \
8300 case IEMMODE_64BIT: \
8301 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
8302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8303 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
8304 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8305 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8306 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8307 IEM_MC_REF_EFLAGS(pEFlags); \
8308 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
8309 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8310 IEM_MC_END(); \
8311 break; \
8312 \
8313 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
8314 } \
8315 } \
8316 else \
8317 { \
8318 /* memory */ \
8319 switch (pVCpu->iem.s.enmEffOpSize) \
8320 { \
8321 case IEMMODE_16BIT: \
8322 IEM_MC_BEGIN(3, 3, 0, 0); /* NOTE(review): sibling cases pass IEM_MC_F_MIN_186 here; confirm the 0 is intentional. */ \
8323 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8324 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
8325 \
8326 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8328 \
8329 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8330 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
8331 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8332 \
8333 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8334 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
8335 IEM_MC_FETCH_EFLAGS(EFlags); \
8336 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
8337 \
8338 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8339 IEM_MC_COMMIT_EFLAGS(EFlags); \
8340 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8341 IEM_MC_END(); \
8342 break; \
8343 \
8344 case IEMMODE_32BIT: \
8345 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
8346 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
8348 \
8349 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8351 \
8352 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8353 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
8354 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8355 \
8356 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8357 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
8358 IEM_MC_FETCH_EFLAGS(EFlags); \
8359 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
8360 \
8361 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8362 IEM_MC_COMMIT_EFLAGS(EFlags); \
8363 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8364 IEM_MC_END(); \
8365 break; \
8366 \
8367 case IEMMODE_64BIT: \
8368 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
8369 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8370 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
8371 \
8372 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8374 \
8375 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8376 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
8377 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8378 \
8379 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8380 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
8381 IEM_MC_FETCH_EFLAGS(EFlags); \
8382 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
8383 \
8384 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8385 IEM_MC_COMMIT_EFLAGS(EFlags); \
8386 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8387 IEM_MC_END(); \
8388 break; \
8389 \
8390 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
8391 } \
8392 } (void)0
8393
8394 switch (IEM_GET_MODRM_REG_8(bRm))
8395 {
8396 /**
8397 * @opdone
8398 * @opmaps grp2_c1
8399 * @opcode /0
8400 * @opflclass rotate_count
8401 */
8402 case 0:
8403 {
8404 IEMOP_MNEMONIC2(MI, ROL, rol, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8405 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
8406 break;
8407 }
8408 /**
8409 * @opdone
8410 * @opmaps grp2_c1
8411 * @opcode /1
8412 * @opflclass rotate_count
8413 */
8414 case 1:
8415 {
8416 IEMOP_MNEMONIC2(MI, ROR, ror, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8417 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
8418 break;
8419 }
8420 /**
8421 * @opdone
8422 * @opmaps grp2_c1
8423 * @opcode /2
8424 * @opflclass rotate_carry_count
8425 */
8426 case 2:
8427 {
8428 IEMOP_MNEMONIC2(MI, RCL, rcl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8429 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
8430 break;
8431 }
8432 /**
8433 * @opdone
8434 * @opmaps grp2_c1
8435 * @opcode /3
8436 * @opflclass rotate_carry_count
8437 */
8438 case 3:
8439 {
8440 IEMOP_MNEMONIC2(MI, RCR, rcr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8441 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
8442 break;
8443 }
8444 /**
8445 * @opdone
8446 * @opmaps grp2_c1
8447 * @opcode /4
8448 * @opflclass shift_count
8449 */
8450 case 4:
8451 {
8452 IEMOP_MNEMONIC2(MI, SHL, shl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8453 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
8454 break;
8455 }
8456 /**
8457 * @opdone
8458 * @opmaps grp2_c1
8459 * @opcode /5
8460 * @opflclass shift_count
8461 */
8462 case 5:
8463 {
8464 IEMOP_MNEMONIC2(MI, SHR, shr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8465 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
8466 break;
8467 }
8468 /**
8469 * @opdone
8470 * @opmaps grp2_c1
8471 * @opcode /7
8472 * @opflclass shift_count
8473 */
8474 case 7:
8475 {
8476 IEMOP_MNEMONIC2(MI, SAR, sar, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8477 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
8478 break;
8479 }
8480
8481 case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is undefined in group 2. */
8482 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8483 }
8484#undef GRP2_BODY_Ev_Ib
8485}
8486
8487
8488/**
8489 * @opcode 0xc2
 *
 * Near return popping Iw extra bytes off the stack; deferred to a C
 * implementation per effective operand size.  Modifies RSP (branch via
 * stack, indirect target).
8490 */
8491FNIEMOP_DEF(iemOp_retn_Iw)
8492{
8493 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
8494 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8495 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8497 switch (pVCpu->iem.s.enmEffOpSize)
8498 {
8499 case IEMMODE_16BIT:
8500 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8501 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_16, u16Imm);
8502 case IEMMODE_32BIT:
8503 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8504 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_32, u16Imm);
8505 case IEMMODE_64BIT:
8506 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8507 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_64, u16Imm);
8508 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8509 }
8510}
8511
8512
8513/**
8514 * @opcode 0xc3
 *
 * Plain near return; deferred to a C implementation per effective operand
 * size.  Modifies RSP (branch via stack, indirect target).
8515 */
8516FNIEMOP_DEF(iemOp_retn)
8517{
8518 IEMOP_MNEMONIC(retn, "retn");
8519 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8521 switch (pVCpu->iem.s.enmEffOpSize)
8522 {
8523 case IEMMODE_16BIT:
8524 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8525 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_16);
8526 case IEMMODE_32BIT:
8527 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8528 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_32);
8529 case IEMMODE_64BIT:
8530 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8531 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_64);
8532 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8533 }
8534}
8535
8536
8537/**
8538 * @opcode 0xc4
 *
 * Dual-purpose opcode: LES Gv,Mp in legacy mode with a memory operand,
 * otherwise the 3-byte VEX prefix (C4).
8539 */
8540FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
8541{
8542 /* The LES instruction is invalid in 64-bit mode. In legacy and
8543 compatibility mode it is invalid with MOD=3.
8544 The use as a VEX prefix is made possible by assigning the inverted
8545 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
8546 outside of 64-bit mode. VEX is not available in real or v86 mode. */
8547 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8548 if ( IEM_IS_64BIT_CODE(pVCpu)
8549 || IEM_IS_MODRM_REG_MODE(bRm) )
8550 {
8551 IEMOP_MNEMONIC(vex3_prefix, "vex3");
8552 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
8553 {
8554 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
8555 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
8556 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
8557 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
8558 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
8559 if ((bVex2 & 0x80 /* VEX.W */) && IEM_IS_64BIT_CODE(pVCpu))
8560 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
8561 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8; /* VEX.R, X and B are stored inverted in the prefix. */
8562 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
8563 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
8564 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf; /* VEX.vvvv, also inverted. */
8565 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1; /* VEX.L */
8566 pVCpu->iem.s.idxPrefix = bVex2 & 0x3; /* VEX.pp implied-prefix selector. */
8567
8568 switch (bRm & 0x1f) /* VEX.mmmmm leading-opcode-map selector. */
8569 {
8570 case 1: /* 0x0f lead opcode byte. */
8571#ifdef IEM_WITH_VEX
8572 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8573#else
8574 IEMOP_BITCH_ABOUT_STUB();
8575 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8576#endif
8577
8578 case 2: /* 0x0f 0x38 lead opcode bytes. */
8579#ifdef IEM_WITH_VEX
8580 return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8581#else
8582 IEMOP_BITCH_ABOUT_STUB();
8583 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8584#endif
8585
8586 case 3: /* 0x0f 0x3a lead opcode bytes. */
8587#ifdef IEM_WITH_VEX
8588 return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8589#else
8590 IEMOP_BITCH_ABOUT_STUB();
8591 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8592#endif
8593
8594 default:
8595 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
8596 IEMOP_RAISE_INVALID_OPCODE_RET();
8597 }
8598 }
8599 Log(("VEX3: VEX support disabled!\n"));
8600 IEMOP_RAISE_INVALID_OPCODE_RET();
8601 }
8602
8603 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
8604 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
8605}
8606
8607
8608/**
8609 * @opcode 0xc5
 *
 * Dual-purpose opcode: LDS Gv,Mp in legacy mode with a memory operand,
 * otherwise the 2-byte VEX prefix (C5), which always selects opcode map 1.
8610 */
8611FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
8612{
8613 /* The LDS instruction is invalid in 64-bit mode. In legacy and
8614 compatibility mode it is invalid with MOD=3.
8615 The use as a VEX prefix is made possible by assigning the inverted
8616 REX.R to the top MOD bit, and the top bit in the inverted register
8617 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
8618 to accessing registers 0..7 in this VEX form. */
8619 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8620 if ( IEM_IS_64BIT_CODE(pVCpu)
8621 || IEM_IS_MODRM_REG_MODE(bRm))
8622 {
8623 IEMOP_MNEMONIC(vex2_prefix, "vex2");
8624 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
8625 {
8626 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
8627 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
8628 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
8629 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
8630 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8; /* VEX.R, stored inverted. */
8631 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf; /* VEX.vvvv, also inverted. */
8632 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1; /* VEX.L */
8633 pVCpu->iem.s.idxPrefix = bRm & 0x3; /* VEX.pp implied-prefix selector. */
8634
8635#ifdef IEM_WITH_VEX
8636 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8637#else
8638 IEMOP_BITCH_ABOUT_STUB();
8639 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8640#endif
8641 }
8642
8643 /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
8644 Log(("VEX2: VEX support disabled!\n"));
8645 IEMOP_RAISE_INVALID_OPCODE_RET();
8646 }
8647
8648 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
8649 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
8650}
8651
8652
8653/**
8654 * @opcode 0xc6
8655 */
8656FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
8657{
8658 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8659 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
8660 IEMOP_RAISE_INVALID_OPCODE_RET();
8661 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
8662
8663 if (IEM_IS_MODRM_REG_MODE(bRm))
8664 {
8665 /* register access */
8666 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8667 IEM_MC_BEGIN(0, 0, 0, 0);
8668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8669 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
8670 IEM_MC_ADVANCE_RIP_AND_FINISH();
8671 IEM_MC_END();
8672 }
8673 else
8674 {
8675 /* memory access. */
8676 IEM_MC_BEGIN(0, 1, 0, 0);
8677 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8678 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8679 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8681 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
8682 IEM_MC_ADVANCE_RIP_AND_FINISH();
8683 IEM_MC_END();
8684 }
8685}
8686
8687
8688/**
8689 * @opcode 0xc7
 *
 * Group 11: only /0 (mov Ev,Iz) is defined; all other /r values raise \#UD.
 * Stores the Iz immediate into the register or memory destination; in
 * 64-bit mode the immediate is a sign-extended imm32.  EFLAGS untouched.
8690 */
8691FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
8692{
8693 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8694 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
8695 IEMOP_RAISE_INVALID_OPCODE_RET();
8696 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
8697
8698 if (IEM_IS_MODRM_REG_MODE(bRm))
8699 {
8700 /* register access */
8701 switch (pVCpu->iem.s.enmEffOpSize)
8702 {
8703 case IEMMODE_16BIT:
8704 IEM_MC_BEGIN(0, 0, 0, 0);
8705 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8707 IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
8708 IEM_MC_ADVANCE_RIP_AND_FINISH();
8709 IEM_MC_END();
8710 break;
8711
8712 case IEMMODE_32BIT:
8713 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8714 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8716 IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
8717 IEM_MC_ADVANCE_RIP_AND_FINISH();
8718 IEM_MC_END();
8719 break;
8720
8721 case IEMMODE_64BIT:
8722 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
8723 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* sign-extended imm32, not a full imm64. */
8724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8725 IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
8726 IEM_MC_ADVANCE_RIP_AND_FINISH();
8727 IEM_MC_END();
8728 break;
8729
8730 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8731 }
8732 }
8733 else
8734 {
8735 /* memory access. */
8736 switch (pVCpu->iem.s.enmEffOpSize)
8737 {
8738 case IEMMODE_16BIT:
8739 IEM_MC_BEGIN(0, 1, 0, 0);
8740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
8742 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8744 IEM_MC_STORE_MEM_U16_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
8745 IEM_MC_ADVANCE_RIP_AND_FINISH();
8746 IEM_MC_END();
8747 break;
8748
8749 case IEMMODE_32BIT:
8750 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8751 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8752 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8753 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8755 IEM_MC_STORE_MEM_U32_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
8756 IEM_MC_ADVANCE_RIP_AND_FINISH();
8757 IEM_MC_END();
8758 break;
8759
8760 case IEMMODE_64BIT:
8761 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
8762 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8763 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* imm is 4 bytes even for the 64-bit store. */
8764 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8766 IEM_MC_STORE_MEM_U64_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
8767 IEM_MC_ADVANCE_RIP_AND_FINISH();
8768 IEM_MC_END();
8769 break;
8770
8771 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8772 }
8773 }
8774}
8775
8776
8777
8778
8779/**
8780 * @opcode 0xc8
 *
 * ENTER Iw,Ib: allocate a cbFrame-byte stack frame with u8NestingLevel
 * nesting levels; deferred to a C implementation.  Modifies RSP and RBP.
 * 186+ only; defaults to 64-bit operand size in long mode.
8781 */
8782FNIEMOP_DEF(iemOp_enter_Iw_Ib)
8783{
8784 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
8785 IEMOP_HLP_MIN_186();
8786 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8787 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
8788 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
8789 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8790 IEM_MC_DEFER_TO_CIMPL_3_RET(0,
8791 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8792 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
8793 iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
8795
8796
/**
 * @opcode 0xc9
 *
 * LEAVE - release the stack frame set up by ENTER (rSP := rBP, then pop rBP).
 * Deferred to the C implementation (iemCImpl_leave).
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();               /* Instruction requires an 80186 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Stack operation: operand size defaults to 64-bit in long mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Liveness annotation: rSP and rBP are modified by the CIMPL worker. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(0,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
                                iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
8811
8812
/**
 * @opcode 0xca
 *
 * RETF Iw - far return, additionally popping Iw bytes of arguments off the
 * stack. Deferred to iemCImpl_retf, which is a far indirect branch that may
 * change CPU mode and privilege level.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Liveness annotation: rSP changes, and all DS/ES/FS/GS selector/base/
       limit/attribute state is listed as potentially modified - presumably
       because returning to an outer ring can sanitize these registers
       (see the equivalent note in iemOp_iret). */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
8842
8843
/**
 * @opcode 0xcb
 *
 * RETF - far return without popping any extra argument bytes.  Identical to
 * iemOp_retf_Iw except that the pop count passed to iemCImpl_retf is zero.
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Liveness annotation: same large segment-register flush list as
       iemOp_retf_Iw - presumably because an outer-ring return can sanitize
       DS/ES/FS/GS (see the note in iemOp_iret). */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
8872
8873
/**
 * @opcode 0xcc
 *
 * INT3 - software breakpoint, raises \#BP (vector 3) via iemCImpl_int.
 * Flagged IEM_CIMPL_F_END_TB so the translation block ends here.
 */
FNIEMOP_DEF(iemOp_int3)
{
    IEMOP_MNEMONIC(int3, "int3");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
}
8885
8886
/**
 * @opcode 0xcd
 *
 * INT Ib - software interrupt with an immediate vector, via iemCImpl_int.
 * The liveness flush mask is UINT64_MAX (everything) since the interrupt /
 * task-switch paths can touch nearly all guest register state.
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    IEMOP_MNEMONIC(int_Ib, "int Ib");
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS, UINT64_MAX,
                                iemCImpl_int, u8Int, IEMINT_INTN);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
8900
8901
/**
 * @opcode 0xce
 *
 * INTO - raise \#OF (vector 4) if the overflow flag is set; conditional
 * branch, hence IEM_CIMPL_F_BRANCH_CONDITIONAL. Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();
    /* Flush mask is UINT64_MAX: the interrupt delivery path can touch nearly
       all guest register state. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_BRANCH_CONDITIONAL | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                UINT64_MAX,
                                iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
8915
8916
/**
 * @opcode 0xcf
 *
 * IRET - interrupt return, deferred to iemCImpl_iret.  Checks for pending
 * IRQs before executing (IEM_CIMPL_F_CHECK_IRQ_BEFORE) and may change mode,
 * rflags and privilege level.
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
                                iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
    /* Segment registers are sanitized when returning to an outer ring, or fully
       reloaded when returning to v86 mode. Thus the large flush list above. */
}
8947
8948
/**
 * @opcode 0xd0
 *
 * Group 2 with a byte operand (Eb) and an implicit shift count of 1.
 * The ModR/M /reg field selects ROL, ROR, RCL, RCR, SHL, SHR or SAR;
 * /6 is an invalid encoding.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Need to use a body macro here since the EFLAGS behaviour differs between
       the shifts, rotates and rotate w/ carry. Sigh. */
#define GRP2_BODY_Eb_1(a_pImplExpr) \
        PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
        if (IEM_IS_MODRM_REG_MODE(bRm)) \
        { \
            /* register */ \
            IEM_MC_BEGIN(3, 0, 0, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* memory */ \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } (void)0

    /* The /reg field selects the operation; the EFLAGS behaviour class differs
       per operation, hence a separate worker table for each. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /0
         * @opflclass rotate_1
         */
        case 0:
        {
            IEMOP_MNEMONIC2(M1, ROL, rol, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /1
         * @opflclass rotate_1
         */
        case 1:
        {
            IEMOP_MNEMONIC2(M1, ROR, ror, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /2
         * @opflclass rotate_carry_1
         */
        case 2:
        {
            IEMOP_MNEMONIC2(M1, RCL, rcl, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /3
         * @opflclass rotate_carry_1
         */
        case 3:
        {
            IEMOP_MNEMONIC2(M1, RCR, rcr, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /4
         * @opflclass shift_1
         */
        case 4:
        {
            IEMOP_MNEMONIC2(M1, SHL, shl, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /5
         * @opflclass shift_1
         */
        case 5:
        {
            IEMOP_MNEMONIC2(M1, SHR, shr, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /7
         * @opflclass shift_1
         */
        case 7:
        {
            IEMOP_MNEMONIC2(M1, SAR, sar, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
            break;
        }
        /** @opdone */
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is not a valid group 2 encoding. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
#undef GRP2_BODY_Eb_1
}
9088
9089
9090
/**
 * @opcode 0xd1
 *
 * Group 2 with a word/dword/qword operand (Ev) and an implicit shift count
 * of 1.  The ModR/M /reg field selects ROL, ROR, RCL, RCR, SHL, SHR or SAR;
 * /6 is an invalid encoding.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Need to use a body macro here since the EFLAGS behaviour differs between
       the shifts, rotates and rotate w/ carry. Sigh. */
#define GRP2_BODY_Ev_1(a_pImplExpr) \
        PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
        if (IEM_IS_MODRM_REG_MODE(bRm)) \
        { \
            /* register */ \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 0, 0, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* memory */ \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } (void)0

    /* The /reg field selects the operation; each has its own worker table
       because the EFLAGS behaviour class differs. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        /**
         * @opdone
         * @opmaps grp2_d1
         * @opcode /0
         * @opflclass rotate_1
         */
        case 0:
        {
            IEMOP_MNEMONIC2(M1, ROL, rol, Ev, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d1
         * @opcode /1
         * @opflclass rotate_1
         */
        case 1:
        {
            IEMOP_MNEMONIC2(M1, ROR, ror, Ev, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d1
         * @opcode /2
         * @opflclass rotate_carry_1
         */
        case 2:
        {
            IEMOP_MNEMONIC2(M1, RCL, rcl, Ev, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d1
         * @opcode /3
         * @opflclass rotate_carry_1
         */
        case 3:
        {
            IEMOP_MNEMONIC2(M1, RCR, rcr, Ev, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d1
         * @opcode /4
         * @opflclass shift_1
         */
        case 4:
        {
            IEMOP_MNEMONIC2(M1, SHL, shl, Ev, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d1
         * @opcode /5
         * @opflclass shift_1
         */
        case 5:
        {
            IEMOP_MNEMONIC2(M1, SHR, shr, Ev, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d1
         * @opcode /7
         * @opflclass shift_1
         */
        case 7:
        {
            IEMOP_MNEMONIC2(M1, SAR, sar, Ev, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
            break;
        }
        /** @opdone */
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is not a valid group 2 encoding. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
#undef GRP2_BODY_Ev_1
}
9311
9312
/**
 * @opcode 0xd2
 *
 * Group 2 with a byte operand (Eb) and the shift count taken from CL.
 * The ModR/M /reg field selects ROL, ROR, RCL, RCR, SHL, SHR or SAR;
 * /6 is an invalid encoding.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Need to use a body macro here since the EFLAGS behaviour differs between
       the shifts, rotates and rotate w/ carry. Sigh. */
#define GRP2_BODY_Eb_CL(a_pImplExpr) \
        PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
        if (IEM_IS_MODRM_REG_MODE(bRm)) \
        { \
            /* register */ \
            IEM_MC_BEGIN(3, 0, 0, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG(uint8_t, cShiftArg, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* memory */ \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG(uint8_t, cShiftArg, 1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } (void)0

    /* The /reg field selects the operation.  Note: the @opmaps tags below said
       grp2_d0, which looked like a copy&paste from the 0xd0 handler; this is
       the 0xd2 map. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        /**
         * @opdone
         * @opmaps grp2_d2
         * @opcode /0
         * @opflclass rotate_count
         */
        case 0:
        {
            IEMOP_MNEMONIC2EX(rol_Eb_CL, "rol Eb,CL", M_CL, ROL, rol, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d2
         * @opcode /1
         * @opflclass rotate_count
         */
        case 1:
        {
            IEMOP_MNEMONIC2EX(ror_Eb_CL, "ror Eb,CL", M_CL, ROR, ror, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d2
         * @opcode /2
         * @opflclass rotate_carry_count
         */
        case 2:
        {
            IEMOP_MNEMONIC2EX(rcl_Eb_CL, "rcl Eb,CL", M_CL, RCL, rcl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d2
         * @opcode /3
         * @opflclass rotate_carry_count
         */
        case 3:
        {
            IEMOP_MNEMONIC2EX(rcr_Eb_CL, "rcr Eb,CL", M_CL, RCR, rcr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d2
         * @opcode /4
         * @opflclass shift_count
         */
        case 4:
        {
            IEMOP_MNEMONIC2EX(shl_Eb_CL, "shl Eb,CL", M_CL, SHL, shl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d2
         * @opcode /5
         * @opflclass shift_count
         */
        case 5:
        {
            IEMOP_MNEMONIC2EX(shr_Eb_CL, "shr Eb,CL", M_CL, SHR, shr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d2
         * @opcode /7
         * @opflclass shift_count
         */
        case 7:
        {
            IEMOP_MNEMONIC2EX(sar_Eb_CL, "sar Eb,CL", M_CL, SAR, sar, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
            break;
        }
        /** @opdone */
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is not a valid group 2 encoding. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
#undef GRP2_BODY_Eb_CL
}
9454
9455
/**
 * @opcode 0xd3
 *
 * Group 2 with a word/dword/qword operand (Ev) and the shift count taken
 * from CL.  The ModR/M /reg field selects ROL, ROR, RCL, RCR, SHL, SHR or
 * SAR; /6 is an invalid encoding.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Need to use a body macro here since the EFLAGS behaviour differs between
       the shifts, rotates and rotate w/ carry. Sigh. */
#define GRP2_BODY_Ev_CL(a_pImplExpr) \
        PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
        if (IEM_IS_MODRM_REG_MODE(bRm)) \
        { \
            /* register */ \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 0, 0, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                    IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                    IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                    IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* memory */ \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } (void)0
    /* The /reg field selects the operation.  Note: the @opmaps tags below said
       grp2_d0, which looked like a copy&paste from the 0xd0 handler; this is
       the 0xd3 map. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        /**
         * @opdone
         * @opmaps grp2_d3
         * @opcode /0
         * @opflclass rotate_count
         */
        case 0:
        {
            IEMOP_MNEMONIC2EX(rol_Ev_CL, "rol Ev,CL", M_CL, ROL, rol, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d3
         * @opcode /1
         * @opflclass rotate_count
         */
        case 1:
        {
            IEMOP_MNEMONIC2EX(ror_Ev_CL, "ror Ev,CL", M_CL, ROR, ror, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d3
         * @opcode /2
         * @opflclass rotate_carry_count
         */
        case 2:
        {
            IEMOP_MNEMONIC2EX(rcl_Ev_CL, "rcl Ev,CL", M_CL, RCL, rcl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d3
         * @opcode /3
         * @opflclass rotate_carry_count
         */
        case 3:
        {
            IEMOP_MNEMONIC2EX(rcr_Ev_CL, "rcr Ev,CL", M_CL, RCR, rcr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d3
         * @opcode /4
         * @opflclass shift_count
         */
        case 4:
        {
            IEMOP_MNEMONIC2EX(shl_Ev_CL, "shl Ev,CL", M_CL, SHL, shl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d3
         * @opcode /5
         * @opflclass shift_count
         */
        case 5:
        {
            IEMOP_MNEMONIC2EX(shr_Ev_CL, "shr Ev,CL", M_CL, SHR, shr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d3
         * @opcode /7
         * @opflclass shift_count
         */
        case 7:
        {
            IEMOP_MNEMONIC2EX(sar_Ev_CL, "sar Ev,CL", M_CL, SAR, sar, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
            break;
        }
        /** @opdone */
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is not a valid group 2 encoding. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
#undef GRP2_BODY_Ev_CL
}
9681
9682
/**
 * @opcode 0xd4
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef cf,af,of
 *
 * AAM Ib - ASCII adjust AX after multiply, with explicit divisor Ib.
 * Raises \#DE when the immediate is zero; invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
/** @todo testcase: aam */
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm) /* Divide-by-zero check done at decode time. */
        IEMOP_RAISE_DIVIDE_ERROR_RET();
    /* Liveness annotation: only rAX is modified. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aam, bImm);
}
9699
9700
/**
 * @opcode 0xd5
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef cf,af,of
 *
 * AAD Ib - ASCII adjust AX before division, with explicit multiplier Ib.
 * Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
/** @todo testcase: aad? */
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    /* Liveness annotation: only rAX is modified. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aad, bImm);
}
9715
9716
/**
 * @opcode 0xd6
 *
 * SALC - set AL from carry: AL = CF ? 0xff : 0x00.  Undocumented
 * instruction; the encoding is invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC(salc, "salc");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9735
9736
/**
 * @opcode 0xd7
 *
 * XLAT - table lookup: AL = [seg:rBX + zero-extended AL], one variant per
 * effective address size.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            /* Address = BX + zero-extended AL, 16-bit wrap-around. */
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            /* Address = EBX + zero-extended AL. */
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            /* Address = RBX + zero-extended AL. */
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9787
9788
9789/**
9790 * Common worker for FPU instructions working on ST0 and STn, and storing the
9791 * result in ST0.
9792 *
9793 * @param bRm Mod R/M byte.
9794 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9795 */
9796FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
9797{
9798 IEM_MC_BEGIN(3, 1, 0, 0);
9799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9800 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9801 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9802 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9803 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9804
9805 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9806 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9807 IEM_MC_PREPARE_FPU_USAGE();
9808 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
9809 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
9810 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9811 } IEM_MC_ELSE() {
9812 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9813 } IEM_MC_ENDIF();
9814 IEM_MC_ADVANCE_RIP_AND_FINISH();
9815
9816 IEM_MC_END();
9817}
9818
9819
9820/**
9821 * Common worker for FPU instructions working on ST0 and STn, and only affecting
9822 * flags.
9823 *
9824 * @param bRm Mod R/M byte.
9825 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9826 */
9827FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
9828{
9829 IEM_MC_BEGIN(3, 1, 0, 0);
9830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9831 IEM_MC_LOCAL(uint16_t, u16Fsw);
9832 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9833 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9834 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9835
9836 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9837 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9838 IEM_MC_PREPARE_FPU_USAGE();
9839 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
9840 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
9841 IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
9842 } IEM_MC_ELSE() {
9843 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
9844 } IEM_MC_ENDIF();
9845 IEM_MC_ADVANCE_RIP_AND_FINISH();
9846
9847 IEM_MC_END();
9848}
9849
9850
9851/**
9852 * Common worker for FPU instructions working on ST0 and STn, only affecting
9853 * flags, and popping when done.
9854 *
9855 * @param bRm Mod R/M byte.
9856 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9857 */
9858FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
9859{
9860 IEM_MC_BEGIN(3, 1, 0, 0);
9861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9862 IEM_MC_LOCAL(uint16_t, u16Fsw);
9863 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9864 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9865 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9866
9867 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9868 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9869 IEM_MC_PREPARE_FPU_USAGE();
9870 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
9871 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
9872 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
9873 } IEM_MC_ELSE() {
9874 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
9875 } IEM_MC_ENDIF();
9876 IEM_MC_ADVANCE_RIP_AND_FINISH();
9877
9878 IEM_MC_END();
9879}
9880
9881
/*
 * Opcode 0xd8, register form (mod=11): thin dispatch wrappers that bind the
 * appropriate assembly worker to the shared ST0/STn helpers above.
 */

/** Opcode 0xd8 11/0. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xd8 11/1. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xd8 11/2. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    /* Same worker as FCOM, but via the popping helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/4. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xd8 11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xd8 11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}


/** Opcode 0xd8 11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
9944
9945
9946/**
9947 * Common worker for FPU instructions working on ST0 and an m32r, and storing
9948 * the result in ST0.
9949 *
9950 * @param bRm Mod R/M byte.
9951 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9952 */
9953FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
9954{
9955 IEM_MC_BEGIN(3, 3, 0, 0);
9956 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9957 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9958 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
9959 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9960 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9961 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
9962
9963 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9965
9966 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9967 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9968 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9969
9970 IEM_MC_PREPARE_FPU_USAGE();
9971 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
9972 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
9973 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9974 } IEM_MC_ELSE() {
9975 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9976 } IEM_MC_ENDIF();
9977 IEM_MC_ADVANCE_RIP_AND_FINISH();
9978
9979 IEM_MC_END();
9980}
9981
9982
/** Opcode 0xd8 !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}


/** Opcode 0xd8 !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
9997
9998
9999/** Opcode 0xd8 !11/2. */
10000FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
10001{
10002 IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");
10003
10004 IEM_MC_BEGIN(3, 3, 0, 0);
10005 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10006 IEM_MC_LOCAL(uint16_t, u16Fsw);
10007 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
10008 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10009 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10010 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
10011
10012 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10013 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10014
10015 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10016 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10017 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10018
10019 IEM_MC_PREPARE_FPU_USAGE();
10020 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10021 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
10022 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10023 } IEM_MC_ELSE() {
10024 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10025 } IEM_MC_ENDIF();
10026 IEM_MC_ADVANCE_RIP_AND_FINISH();
10027
10028 IEM_MC_END();
10029}
10030
10031
/** Opcode 0xd8 !11/3.
 * FCOMP st0,m32r - identical to iemOp_fcom_m32r except the stack is popped
 * after the FSW update (and on the underflow path). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10063
10064
/** Opcode 0xd8 !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
10095
10096
10097/**
10098 * @opcode 0xd8
10099 */
10100FNIEMOP_DEF(iemOp_EscF0)
10101{
10102 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10103 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
10104
10105 if (IEM_IS_MODRM_REG_MODE(bRm))
10106 {
10107 switch (IEM_GET_MODRM_REG_8(bRm))
10108 {
10109 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
10110 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
10111 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
10112 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
10113 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
10114 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
10115 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
10116 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
10117 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10118 }
10119 }
10120 else
10121 {
10122 switch (IEM_GET_MODRM_REG_8(bRm))
10123 {
10124 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
10125 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
10126 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
10127 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
10128 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
10129 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
10130 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
10131 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
10132 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10133 }
10134 }
10135}
10136
10137
/** Opcode 0xd9 /0 mem32real
 * FLD m32r - converts a 32-bit real from memory to 80-bit and pushes it;
 * pushing requires the incoming top-of-stack slot (ST7 after push) to be
 * free, otherwise the push-overflow path is taken.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10168
10169
/** Opcode 0xd9 !11/2 mem32real
 * FST m32r - stores ST0 to memory as a 32-bit real.  The destination is
 * mapped write-only up front; on an empty ST0 a negative QNaN is written
 * instead if the invalid-operation exception is masked (FCW.IM), otherwise
 * the mapping is rolled back. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10207
10208
/** Opcode 0xd9 !11/3
 * FSTP m32r - same as iemOp_fst_m32r but pops the stack after the store
 * (and on the underflow path). */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10246
10247
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte - loads the FPU environment; the 14 vs 28 byte layout
 * depends on the effective operand size, which is passed down to the
 * C implementation together with the segment and address. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
10265
10266
10267/** Opcode 0xd9 !11/5 */
10268FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
10269{
10270 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
10271 IEM_MC_BEGIN(1, 1, 0, 0);
10272 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10273 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10274
10275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10276 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10277 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10278
10279 IEM_MC_ARG(uint16_t, u16Fsw, 0);
10280 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10281
10282 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, 0, iemCImpl_fldcw, u16Fsw);
10283 IEM_MC_END();
10284}
10285
10286
/** Opcode 0xd9 !11/6
 * FNSTENV m14/m28byte - stores the FPU environment; layout (14 vs 28 bytes)
 * is selected by the effective operand size passed to the C implementation. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
10304
10305
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte - stores the current FPU control word to memory.  Simple
 * enough to implement entirely inline (no C-impl call). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
10322
10323
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP - does nothing except the usual \#NM/\#MF checks and updating the
 * FPU opcode/instruction pointer bookkeeping. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
10339
10340
/** Opcode 0xd9 11/0 stN
 * FLD stN - pushes a copy of STn onto the stack; pushes the push-underflow
 * state if STn is empty. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10365
10366
/** Opcode 0xd9 11/3 stN
 * FXCH stN - exchanges ST0 and STn.  The non-empty case is done inline
 * (store STn's old value into FpuRes for ST0 and ST0's old value into STn);
 * the underflow case is deferred to iemCImpl_fxch_underflow. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10395
10396
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP stN - copies ST0 to STn and pops.  The stN==st0 special case degrades
 * to a pure pop ('ffreep st0' idiom) and skips the copy entirely. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10443
10444
10445/**
10446 * Common worker for FPU instructions working on ST0 and replaces it with the
10447 * result, i.e. unary operators.
10448 *
10449 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10450 */
10451FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
10452{
10453 IEM_MC_BEGIN(2, 1, 0, 0);
10454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10455 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10456 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10457 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
10458
10459 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10460 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10461 IEM_MC_PREPARE_FPU_USAGE();
10462 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
10463 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
10464 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10465 } IEM_MC_ELSE() {
10466 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10467 } IEM_MC_ENDIF();
10468 IEM_MC_ADVANCE_RIP_AND_FINISH();
10469
10470 IEM_MC_END();
10471}
10472
10473
/** Opcode 0xd9 0xe0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
10488
10489
/** Opcode 0xd9 0xe4.
 * FTST - compares ST0 with zero, updating only FSW; underflow path if ST0
 * is empty. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10513
10514
/** Opcode 0xd9 0xe5.
 * FXAM - classifies ST0 into the C0-C3 condition bits.  Unlike the other
 * workers this takes a plain register reference (no empty check) since FXAM
 * also classifies an empty ST0. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10535
10536
10537/**
10538 * Common worker for FPU instructions pushing a constant onto the FPU stack.
10539 *
10540 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10541 */
10542FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
10543{
10544 IEM_MC_BEGIN(1, 1, 0, 0);
10545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10546 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10547 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10548
10549 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10550 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10551 IEM_MC_PREPARE_FPU_USAGE();
10552 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
10553 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
10554 IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
10555 } IEM_MC_ELSE() {
10556 IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
10557 } IEM_MC_ENDIF();
10558 IEM_MC_ADVANCE_RIP_AND_FINISH();
10559
10560 IEM_MC_END();
10561}
10562
10563
/*
 * Opcode 0xd9 0xe8-0xee: constant-load wrappers, all routed through
 * iemOpHlpFpuPushConstant with the matching assembly worker.
 */

/** Opcode 0xd9 0xe8. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9. */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea. */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec. */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed. */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
10616
10617
/** Opcode 0xd9 0xf0.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
10631
10632
10633/**
10634 * Common worker for FPU instructions working on STn and ST0, storing the result
10635 * in STn, and popping the stack unless IE, DE or ZE was raised.
10636 *
10637 * @param bRm Mod R/M byte.
10638 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10639 */
10640FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
10641{
10642 IEM_MC_BEGIN(3, 1, 0, 0);
10643 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10644 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10645 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10646 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10647 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10648
10649 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10650 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10651
10652 IEM_MC_PREPARE_FPU_USAGE();
10653 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
10654 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
10655 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10656 } IEM_MC_ELSE() {
10657 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10658 } IEM_MC_ENDIF();
10659 IEM_MC_ADVANCE_RIP_AND_FINISH();
10660
10661 IEM_MC_END();
10662}
10663
10664
/** Opcode 0xd9 0xf1.
 * FYL2X st1,st0 - passes a literal 1 as bRm so the helper operates on ST1. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
10671
10672
10673/**
10674 * Common worker for FPU instructions working on ST0 and having two outputs, one
10675 * replacing ST0 and one pushed onto the stack.
10676 *
10677 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10678 */
10679FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
10680{
10681 IEM_MC_BEGIN(2, 1, 0, 0);
10682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10683 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
10684 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
10685 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
10686
10687 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10688 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10689 IEM_MC_PREPARE_FPU_USAGE();
10690 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
10691 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
10692 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
10693 } IEM_MC_ELSE() {
10694 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
10695 } IEM_MC_ENDIF();
10696 IEM_MC_ADVANCE_RIP_AND_FINISH();
10697
10698 IEM_MC_END();
10699}
10700
10701
/** Opcode 0xd9 0xf2.  FPTAN: ST(0) := tan(ST(0)), then push 1.0. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
10708
10709
/** Opcode 0xd9 0xf3.  FPATAN: ST(1) := arctan(ST(1)/ST(0)), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
10716
10717
/** Opcode 0xd9 0xf4.  FXTRACT: split ST(0) into exponent (ST1) and significand (ST0). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
10724
10725
/** Opcode 0xd9 0xf5.  FPREM1: IEEE 754 partial remainder, ST(0) := ST(0) REM ST(1). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
10732
10733
/** Opcode 0xd9 0xf6.  FDECSTP: decrement the FPU TOP pointer (no tag changes). */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    /* Zero FSW condition bits (clears C0..C3) and record the FPU opcode. */
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10754
10755
/** Opcode 0xd9 0xf7.  FINCSTP: increment the FPU TOP pointer (no tag changes). */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    /* Zero FSW condition bits (clears C0..C3) and record the FPU opcode. */
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10776
10777
/** Opcode 0xd9 0xf8.  FPREM: 8087-style partial remainder, ST(0) := ST(0) % ST(1). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
10784
10785
/** Opcode 0xd9 0xf9.  FYL2XP1: ST(1) := ST(1) * log2(ST(0) + 1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
10792
10793
/** Opcode 0xd9 0xfa.  FSQRT: ST(0) := sqrt(ST(0)). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
10800
10801
/** Opcode 0xd9 0xfb.  FSINCOS: ST(0) := sin(ST(0)), then push cos(old ST(0)). */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
10808
10809
/** Opcode 0xd9 0xfc.  FRNDINT: round ST(0) to integer per the FCW rounding mode. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
10816
10817
/** Opcode 0xd9 0xfd.  FSCALE: ST(0) := ST(0) * 2^trunc(ST(1)). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
10824
10825
/** Opcode 0xd9 0xfe.  FSIN: ST(0) := sin(ST(0)). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
10832
10833
/** Opcode 0xd9 0xff.  FCOS: ST(0) := cos(ST(0)). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
10840
10841
/** Used by iemOp_EscF1.
 * Dispatch table for the register-form 0xd9 opcodes 0xe0 thru 0xff,
 * indexed by (opcode byte - 0xe0).  Unassigned encodings map to
 * iemOp_Invalid. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
10878
10879
/**
 * @opcode  0xd9
 *
 * Escape opcode 0xd9 decoder: splits on register vs. memory ModR/M form and
 * dispatches on the REG field.  The FPU opcode word (low 11 bits of the
 * instruction, as stored by FNSTENV/FNSAVE) is recorded up front.
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                /* Only 0xd0 (FNOP) is defined in the /2 register range. */
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* 0xe0..0xff: table-driven dispatch (FCHS, FABS, loads, trig, ...). */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r,   bRm);
            case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r,   bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r,  bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv,     bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw,      bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv,    bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw,     bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10924
10925
/** Opcode 0xda 11/0.
 * FCMOVB: copy ST(i) to ST(0) when CF=1 (below).  Underflows if either
 * register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP/FPUCS are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10950
10951
/** Opcode 0xda 11/1.
 * FCMOVE: copy ST(i) to ST(0) when ZF=1 (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10976
10977
/** Opcode 0xda 11/2.
 * FCMOVBE: copy ST(i) to ST(0) when CF=1 or ZF=1 (below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11002
11003
/** Opcode 0xda 11/3.
 * FCMOVU: copy ST(i) to ST(0) when PF=1 (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11028
11029
/**
 * Common worker for FPU instructions working on ST0 and ST1, only affecting
 * flags, and popping twice when done.
 *
 * The implementation only produces an FSW value (no register result); the
 * stack is popped twice afterwards.  If either register is empty, stack
 * underflow is signalled and both pops are still performed (subject to
 * exception masking).
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11059
11060
/** Opcode 0xda 0xe9.
 * FUCOMPP: unordered compare ST(0) with ST(1), then pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
11067
11068
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * The 32-bit signed integer operand is fetched from memory before the FPU
 * state is touched; an empty ST0 signals stack underflow instead of calling
 * the implementation.
 *
 * @param   bRm         Mod R/M byte (memory form; used for effective address).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11104
11105
/** Opcode 0xda !11/0.  FIADD: ST(0) := ST(0) + m32int. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
11112
11113
/** Opcode 0xda !11/1.  FIMUL: ST(0) := ST(0) * m32int. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
11120
11121
/** Opcode 0xda !11/2.
 * FICOM: compare ST(0) with m32int, setting C0/C2/C3; no pop. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* Memory operand form also records FPUDP/FPUDS alongside the FSW. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11153
11154
/** Opcode 0xda !11/3.
 * FICOMP: compare ST(0) with m32int, then pop the stack. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        /* Same comparison worker as FICOM; only the pop differs. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11186
11187
/** Opcode 0xda !11/4.  FISUB: ST(0) := ST(0) - m32int. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
11194
11195
/** Opcode 0xda !11/5.  FISUBR: ST(0) := m32int - ST(0) (reversed operands). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
11202
11203
/** Opcode 0xda !11/6.  FIDIV: ST(0) := ST(0) / m32int. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
11210
11211
/** Opcode 0xda !11/7.  FIDIVR: ST(0) := m32int / ST(0) (reversed operands). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
11218
11219
/**
 * @opcode  0xda
 *
 * Escape opcode 0xda decoder.  Register form: FCMOVcc and FUCOMPP (0xe9 only).
 * Memory form: 32-bit integer arithmetic/compare against ST(0).
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5:
                /* Only encoding 0xe9 (FUCOMPP) is valid in the /5 range. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11261
11262
/** Opcode 0xdb !11/0.
 * FILD: convert m32int to real80 and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs ST7 (relative to current TOP) to be free; otherwise overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11293
11294
/** Opcode 0xdb !11/1.
 * FISTTP (SSE3): store ST(0) to m32int using truncation (round toward zero,
 * ignoring the FCW rounding mode), then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination for writing up front so #PF is raised before any
       FPU state is modified. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11332
11333
/** Opcode 0xdb !11/2.
 * FIST: store ST(0) to m32int using the FCW rounding mode; no pop. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination for writing before touching FPU state. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11371
11372
/** Opcode 0xdb !11/3.
 * FISTP: store ST(0) to m32int using the FCW rounding mode, then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination for writing before touching FPU state. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11410
11411
/** Opcode 0xdb !11/5.
 * FLD: push an 80-bit real from memory onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs ST7 (relative to current TOP) to be free; otherwise overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11442
11443
/** Opcode 0xdb !11/7.
 * FSTP: store ST(0) to an 80-bit real in memory, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination for writing before touching FPU state. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_MEM_MAP_R80_WO(pr80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: with IM masked, store the real indefinite QNaN. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11481
11482
/** Opcode 0xdb 11/0.
 * FCMOVNB: copy ST(i) to ST(0) when CF=0 (not below). */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11507
11508
/** Opcode 0xdb 11/1.
 * FCMOVNE: copy ST(i) to ST(0) when ZF=0 (not equal). */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11533
11534
/** Opcode 0xdb 11/2.
 * FCMOVNBE: copy ST(i) to ST(0) when CF=0 and ZF=0 (not below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11559
11560
11561/** Opcode 0xdb 11/3. */
11562FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
11563{
11564 IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
11565 IEM_MC_BEGIN(0, 1, 0, 0);
11566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11567 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
11568
11569 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11570 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11571
11572 IEM_MC_PREPARE_FPU_USAGE();
11573 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
11574 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
11575 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
11576 } IEM_MC_ENDIF();
11577 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
11578 } IEM_MC_ELSE() {
11579 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11580 } IEM_MC_ENDIF();
11581 IEM_MC_ADVANCE_RIP_AND_FINISH();
11582
11583 IEM_MC_END();
11584}
11585
11586
/** Opcode 0xdb 0xe0.
 * FNENI: 8087-only interrupt enable; treated as a NOP on later FPUs (only
 * the device-not-available check is performed). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11597
11598
/** Opcode 0xdb 0xe1.
 * FNDISI: 8087-only interrupt disable; treated as a NOP on later FPUs. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11609
11610
/** Opcode 0xdb 0xe2.
 * FNCLEX: clear the FPU exception flags in FSW without checking for
 * pending exceptions first (the no-wait form). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11623
11624
/** Opcode 0xdb 0xe3. */
FNIEMOP_DEF(iemOp_fninit)
{
    /* FNINIT: defer the full FPU reset to the C implementation; fCheckXcpts is
       false because this is the no-wait form. */
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, 0, iemCImpl_finit, false /*fCheckXcpts*/);
}
11632
11633
/** Opcode 0xdb 0xe4. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    /* 80287-era instruction, ignored on later FPUs: no-op besides #NM check. */
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11644
11645
/** Opcode 0xdb 0xe5. */
FNIEMOP_DEF(iemOp_frstpm)
{
    /* 80287XL-only instruction; newer CPUs raise #UD, which is what we
       implement (the ignore-variant is kept disabled below for reference). */
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
11661
11662
/** Opcode 0xdb 11/5. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    /* FUCOMI ST0,ST(i): unordered compare setting EFLAGS, deferred to the C
       implementation.  The last argument packs the pop flag (none here) with
       the FPU opcode word. */
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
11671
11672
11673/** Opcode 0xdb 11/6. */
11674FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
11675{
11676 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
11677 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
11678 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
11679 false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
11680}
11681
11682
11683/**
11684 * @opcode 0xdb
11685 */
11686FNIEMOP_DEF(iemOp_EscF3)
11687{
11688 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11689 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
11690 if (IEM_IS_MODRM_REG_MODE(bRm))
11691 {
11692 switch (IEM_GET_MODRM_REG_8(bRm))
11693 {
11694 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
11695 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
11696 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
11697 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
11698 case 4:
11699 switch (bRm)
11700 {
11701 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
11702 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
11703 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
11704 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
11705 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
11706 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
11707 case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
11708 case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
11709 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11710 }
11711 break;
11712 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
11713 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
11714 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11715 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11716 }
11717 }
11718 else
11719 {
11720 switch (IEM_GET_MODRM_REG_8(bRm))
11721 {
11722 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
11723 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
11724 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
11725 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
11726 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
11727 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
11728 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
11729 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
11730 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11731 }
11732 }
11733}
11734
11735
11736/**
11737 * Common worker for FPU instructions working on STn and ST0, and storing the
11738 * result in STn unless IE, DE or ZE was raised.
11739 *
11740 * @param bRm Mod R/M byte.
11741 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11742 */
11743FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
11744{
11745 IEM_MC_BEGIN(3, 1, 0, 0);
11746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11747 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11748 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11749 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11750 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
11751
11752 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11753 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11754
11755 IEM_MC_PREPARE_FPU_USAGE();
11756 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
11757 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
11758 IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
11759 } IEM_MC_ELSE() {
11760 IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
11761 } IEM_MC_ENDIF();
11762 IEM_MC_ADVANCE_RIP_AND_FINISH();
11763
11764 IEM_MC_END();
11765}
11766
11767
/** Opcode 0xdc 11/0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    /* FADD ST(i),ST0: common stN,st0 worker with the r80 add helper. */
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
11774
11775
/** Opcode 0xdc 11/1. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    /* FMUL ST(i),ST0: common stN,st0 worker with the r80 multiply helper. */
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
11782
11783
/** Opcode 0xdc 11/4. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    /* FSUBR ST(i),ST0: common stN,st0 worker with the reversed-subtract helper. */
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
11790
11791
/** Opcode 0xdc 11/5. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    /* FSUB ST(i),ST0: common stN,st0 worker with the r80 subtract helper. */
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
11798
11799
/** Opcode 0xdc 11/6. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    /* FDIVR ST(i),ST0: common stN,st0 worker with the reversed-divide helper. */
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
11806
11807
/** Opcode 0xdc 11/7. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    /* FDIV ST(i),ST0: common stN,st0 worker with the r80 divide helper. */
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
11814
11815
11816/**
11817 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
11818 * memory operand, and storing the result in ST0.
11819 *
11820 * @param bRm Mod R/M byte.
11821 * @param pfnImpl Pointer to the instruction implementation (assembly).
11822 */
11823FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
11824{
11825 IEM_MC_BEGIN(3, 3, 0, 0);
11826 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11827 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11828 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
11829 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11830 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
11831 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
11832
11833 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11835 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11836 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11837
11838 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11839 IEM_MC_PREPARE_FPU_USAGE();
11840 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
11841 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
11842 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11843 } IEM_MC_ELSE() {
11844 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11845 } IEM_MC_ENDIF();
11846 IEM_MC_ADVANCE_RIP_AND_FINISH();
11847
11848 IEM_MC_END();
11849}
11850
11851
/** Opcode 0xdc !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    /* FADD m64real: common ST0/m64r worker with the r80-by-r64 add helper. */
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
11858
11859
/** Opcode 0xdc !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    /* FMUL m64real: common ST0/m64r worker with the r80-by-r64 multiply helper. */
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
11866
11867
/** Opcode 0xdc !11/2. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    /* FCOM ST0,m64real: compares ST0 with the memory operand and updates FSW
       only; no stack register is written and nothing is popped. */
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no stack register to flag as involved in the underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11899
11900
/** Opcode 0xdc !11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    /* FCOMP ST0,m64real: same as FCOM m64r above, but pops ST0 afterwards
       (note the _THEN_POP variants of the FSW update / underflow macros). */
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11932
11933
/** Opcode 0xdc !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    /* FSUB m64real: common ST0/m64r worker with the r80-by-r64 subtract helper. */
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
11940
11941
/** Opcode 0xdc !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    /* FSUBR m64real: common ST0/m64r worker with the reversed-subtract helper. */
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
11948
11949
/** Opcode 0xdc !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    /* FDIV m64real: common ST0/m64r worker with the r80-by-r64 divide helper. */
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
11956
11957
/** Opcode 0xdc !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    /* FDIVR m64real: common ST0/m64r worker with the reversed-divide helper. */
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
11964
11965
11966/**
11967 * @opcode 0xdc
11968 */
11969FNIEMOP_DEF(iemOp_EscF4)
11970{
11971 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11972 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
11973 if (IEM_IS_MODRM_REG_MODE(bRm))
11974 {
11975 switch (IEM_GET_MODRM_REG_8(bRm))
11976 {
11977 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
11978 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
11979 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
11980 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
11981 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
11982 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
11983 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
11984 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
11985 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11986 }
11987 }
11988 else
11989 {
11990 switch (IEM_GET_MODRM_REG_8(bRm))
11991 {
11992 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
11993 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
11994 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
11995 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
11996 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
11997 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
11998 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
11999 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
12000 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12001 }
12002 }
12003}
12004
12005
/** Opcode 0xdd !11/0.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    /* FLD m64real: converts the 64-bit value to r80 and pushes it; a non-empty
       ST(7) (the incoming top-1 slot) means a push overflow instead. */
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12036
12037
/** Opcode 0xdd !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    /* FISTTP m64int: store ST0 as a truncated 64-bit integer and pop.  On an
       empty ST0, the integer-indefinite value is stored when FCW.IM is set,
       otherwise the mapping is rolled back; underflow is reported either way. */
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination write-only before looking at the stack, so memory
       faults are taken ahead of any FPU state changes. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12075
12076
/** Opcode 0xdd !11/2. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    /* FST m64real: store ST0 as a 64-bit real without popping.  On an empty
       ST0, a negative QNaN is stored when FCW.IM is set, otherwise the
       mapping is rolled back; underflow is reported either way. */
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination write-only up front so memory faults precede FPU
       state changes. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12114
12115
12116
12117
/** Opcode 0xdd !11/3. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    /* FSTP m64real: identical to FST m64r above, except the _THEN_POP FSW /
       underflow variants pop ST0 afterwards. */
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: store default QNaN if the invalid-op exception is masked. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12155
12156
/** Opcode 0xdd !11/4. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    /* FRSTOR m94/108byte: restore the full FPU state image; deferred to the C
       implementation, which needs the effective operand size to pick the
       16-/32-bit image layout. */
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
12174
12175
/** Opcode 0xdd !11/6. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    /* FNSAVE m94/108byte: save the full FPU state image; deferred to the C
       implementation. */
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
12193
/** Opcode 0xdd !11/7. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    /* FNSTSW m16: store the FPU status word to memory; read-only access to
       the FPU state, no exception-pending check (no-wait form). */
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2, 0, 0);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
12217
12218
/** Opcode 0xdd 11/0. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    /* FFREE ST(i): mark the stack register as empty without touching TOP. */
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
       unmodified. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12238
12239
/** Opcode 0xdd 11/2. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    /* FST ST(i): copy ST0 into ST(i) (FSW zero in the synthesized result,
       i.e. no exception bits raised by the copy itself). */
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12262
12263
/** Opcode 0xdd 11/4. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    /* FUCOM ST0,ST(i): unordered compare, FSW update only, no pop. */
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
12270
12271
/** Opcode 0xdd 11/5. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    /* FUCOMP ST0,ST(i): unordered compare, then pop ST0. */
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
12278
12279
12280/**
12281 * @opcode 0xdd
12282 */
12283FNIEMOP_DEF(iemOp_EscF5)
12284{
12285 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12286 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
12287 if (IEM_IS_MODRM_REG_MODE(bRm))
12288 {
12289 switch (IEM_GET_MODRM_REG_8(bRm))
12290 {
12291 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
12292 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
12293 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
12294 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
12295 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
12296 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
12297 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
12298 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
12299 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12300 }
12301 }
12302 else
12303 {
12304 switch (IEM_GET_MODRM_REG_8(bRm))
12305 {
12306 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
12307 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
12308 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
12309 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
12310 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
12311 case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
12312 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
12313 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
12314 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12315 }
12316 }
12317}
12318
12319
/** Opcode 0xde 11/0. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    /* FADDP ST(i),ST0: add and pop, via the popping stN,st0 worker. */
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
12326
12327
/** Opcode 0xde 11/1. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    /* FMULP ST(i),ST0: multiply and pop, via the popping stN,st0 worker. */
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
12334
12335
/** Opcode 0xde 0xd9. */
FNIEMOP_DEF(iemOp_fcompp)
{
    /* FCOMPP: compare ST0 with ST1 and pop both. */
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
12342
12343
/** Opcode 0xde 11/4. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    /* FSUBRP ST(i),ST0: reversed subtract and pop. */
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
12350
12351
/** Opcode 0xde 11/5. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    /* FSUBP ST(i),ST0: subtract and pop. */
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
12358
12359
/** Opcode 0xde 11/6. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    /* FDIVRP ST(i),ST0: reversed divide and pop. */
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
12366
12367
/** Opcode 0xde 11/7. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    /* FDIVP ST(i),ST0: divide and pop. */
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
12374
12375
12376/**
12377 * Common worker for FPU instructions working on ST0 and an m16i, and storing
12378 * the result in ST0.
12379 *
12380 * @param bRm Mod R/M byte.
12381 * @param pfnAImpl Pointer to the instruction implementation (assembly).
12382 */
12383FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
12384{
12385 IEM_MC_BEGIN(3, 3, 0, 0);
12386 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12387 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
12388 IEM_MC_LOCAL(int16_t, i16Val2);
12389 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
12390 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12391 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
12392
12393 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12395
12396 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12397 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12398 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12399
12400 IEM_MC_PREPARE_FPU_USAGE();
12401 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
12402 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
12403 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
12404 } IEM_MC_ELSE() {
12405 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
12406 } IEM_MC_ENDIF();
12407 IEM_MC_ADVANCE_RIP_AND_FINISH();
12408
12409 IEM_MC_END();
12410}
12411
12412
/** Opcode 0xde !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    /* FIADD m16int: common ST0/m16i worker with the integer-add helper. */
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
12419
12420
/** Opcode 0xde !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    /* FIMUL m16int: common ST0/m16i worker with the integer-multiply helper. */
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
12427
12428
/** Opcode 0xde !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    /* FICOM ST0,m16int: compare ST0 with the 16-bit integer operand, FSW
       update only, no pop. */
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12460
12461
/** Opcode 0xde !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    /* FICOMP ST0,m16int: same as FICOM m16i above, but pops ST0 afterwards
       (note the _THEN_POP macro variants). */
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12493
12494
/** Opcode 0xde !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    /* FISUB m16int: common ST0/m16i worker with the integer-subtract helper. */
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
12501
12502
/** Opcode 0xde !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    /* FISUBR m16int: common ST0/m16i worker with the reversed integer-subtract helper. */
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
12509
12510
/** Opcode 0xde !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    /* ST(0) = ST(0) / m16i; shares the common st0-by-m16i template. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
12517
12518
/** Opcode 0xde !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    /* Reversed divide: ST(0) = m16i / ST(0); only the worker differs. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
12525
12526
/**
 * @opcode 0xde
 *
 * FPU escape group 6 dispatcher: register-form (mod=3) instructions operate
 * on ST(i)/ST(0) and mostly pop, memory-form instructions take a 16-bit
 * integer operand.  The /reg field of the ModR/M byte selects the opcode.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Latch the FPU opcode word (low 3 bits of 0xde + ModR/M) for FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9) /* FCOMPP is the single encoding DE D9. */
                        return FNIEMOP_CALL(iemOp_fcompp);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12567
12568
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Mark ST(i) empty in the tag word, then "pop" by incrementing TOP. */
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12588
12589
/** Opcode 0xdf 0xe0. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    /* No-wait form: only #NM is checked; pending FPU exceptions are NOT
       raised (no IEM_MC_MAYBE_RAISE_FPU_XCPT here, by design). */
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12604
12605
12606/** Opcode 0xdf 11/5. */
12607FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
12608{
12609 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
12610 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
12611 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
12612 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
12613}
12614
12615
/** Opcode 0xdf 11/6. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    /* Ordered compare ST(0) with ST(i), set ZF/PF/CF, then pop (bit 31 of the
       last argument requests the pop; low bits carry the FPU opcode word).
       fUCmp=false selects the ordered (fcomi) worker. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
12624
12625
/** Opcode 0xdf !11/0. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    /* Load a signed 16-bit integer from memory, convert to R80 and push. */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) is the slot the push will occupy; it must
       be empty or we have a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12656
12657
/** Opcode 0xdf !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    /* Store ST(0) to a 16-bit integer with truncation (SSE3 FISTTP), then pop. */
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination write-only up front so access faults precede FPU work. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        /* Commit only if FSW allows it (unmasked exception -> rollback). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): with IM masked, write the integer indefinite value;
           otherwise roll the mapping back and leave memory untouched. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12695
12696
/** Opcode 0xdf !11/2. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    /* Store ST(0) to a 16-bit integer using the current rounding mode; no pop. */
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination write-only up front so access faults precede FPU work. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        /* Commit only if FSW allows it (unmasked exception -> rollback). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): with IM masked, write the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12734
12735
/** Opcode 0xdf !11/3. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    /* Like fist_m16i above, but pops the FPU stack afterwards (uses the
       _THEN_POP FSW-update / underflow macro variants). */
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): with IM masked, write the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12773
12774
/** Opcode 0xdf !11/4. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    /* Load an 80-bit packed BCD value from memory, convert to R80 and push. */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) is the slot the push will occupy. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12805
12806
/** Opcode 0xdf !11/5. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    /* Load a signed 64-bit integer from memory, convert to R80 and push. */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) is the slot the push will occupy. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12837
12838
/** Opcode 0xdf !11/6. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    /* Store ST(0) as 80-bit packed BCD and pop. */
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination write-only up front so access faults precede FPU work. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_MEM_MAP_D80_WO(pd80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        /* Commit only if FSW allows it (unmasked exception -> rollback). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): with IM masked, write the packed-BCD indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12876
12877
/** Opcode 0xdf !11/7. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    /* Store ST(0) to a signed 64-bit integer (current rounding mode), then pop. */
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): with IM masked, write the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12915
12916
/**
 * @opcode 0xdf
 *
 * FPU escape group 7 dispatcher.  Register-form (mod=3) encodings are mostly
 * reserved/undocumented aliases plus FNSTSW AX and the FUCOMIP/FCOMIP pair;
 * memory-form encodings handle 16/64-bit integers and packed BCD.
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Latch the FPU opcode word (low 3 bits of 0xdf + ModR/M) for FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* FNSTSW AX is the single encoding DF E0. */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12957
12958
/**
 * @opcode 0xe0
 * @opfltest zf
 *
 * LOOPNE/LOOPNZ: decrement the counter register selected by the effective
 * address size (CX/ECX/RCX) and branch when it did not reach zero AND ZF=0.
 * Note that the counter is decremented in both branches; only the branch
 * decision differs.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* The IF macro tests CX != 1 (i.e. non-zero after decrement). */
            IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13013
13014
/**
 * @opcode 0xe1
 * @opfltest zf
 *
 * LOOPE/LOOPZ: like LOOPNE above but branches when ZF=1.  The counter
 * register (CX/ECX/RCX) is selected by the effective address size and is
 * decremented in both branches.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13069
13070
/**
 * @opcode 0xe2
 *
 * LOOP: decrement the address-size-selected counter (CX/ECX/RCX) and branch
 * while it is non-zero.  Includes a logging-only shortcut for the LOOP $-2
 * busy-wait pattern, and a counter-zeroing else-branch that lets the
 * recompiler produce better code for the fall-through case.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */

    /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
     * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs.  Shortcutting
     * the loop causes guest crashes, but when logging it's nice to skip a few million
     * lines of useless output. */
#if defined(LOG_ENABLED)
    /* Only when verbose logging is on: a self-branching LOOP (target == its own
       start) just zeroes the counter and falls through in one step. */
    if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
#endif

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NOT_ONE() {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                /* CX was 1: decrementing yields 0, so store the constant directly. */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NOT_ONE() {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NOT_ONE() {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13164
13165
/**
 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ: branch when the address-size-selected counter register
 * is zero.  The counter is not modified.  Note the inverted IF structure:
 * the non-zero case falls through, the zero case takes the jump.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13213
13214
/**
 * @opcode 0xe4
 * @opfltest iopl
 *
 * IN AL,imm8: byte port read; deferred to the C implementation (may VM-exit
 * and performs the IOPL/TSS permission checks).  The liveness annotation
 * says only xAX is written.
 */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Third argument packs the immediate-form flag (0x80) with the address mode. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
13227
13228
/**
 * @opcode 0xe5
 * @opfltest iopl
 *
 * IN eAX,imm8: word/dword port read depending on the effective operand size.
 */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
13242
13243
/**
 * @opcode 0xe6
 * @opfltest iopl
 *
 * OUT imm8,AL: byte port write; deferred to the C implementation (may
 * VM-exit and performs the IOPL/TSS permission checks).
 */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
13256
13257
/**
 * @opcode 0xe7
 * @opfltest iopl
 *
 * OUT imm8,eAX: word/dword port write depending on the effective operand size.
 */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
13271
13272
/**
 * @opcode 0xe8
 *
 * CALL rel16/rel32: near relative call, deferred to the C implementation
 * (pushes the return address, hence IEM_CIMPL_F_BRANCH_STACK).  In 64-bit
 * mode the default operand size is 64-bit and the rel32 is sign-extended.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
                                        iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
                                        iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
                                        iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13306
13307
/**
 * @opcode 0xe9
 *
 * JMP rel16/rel32: near relative jump.  64-bit mode shares the 32-bit
 * immediate path (rel32, sign-extended by the jump macro).
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13337
13338
/**
 * @opcode 0xea
 *
 * JMP ptr16:16 / ptr16:32: direct far jump.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
    uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* May change CS (mode), RFLAGS and cause VM-exits; liveness mask is
       "everything" (UINT64_MAX). */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
                                iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
13360
13361
/**
 * @opcode 0xeb
 *
 * JMP rel8: short relative jump.
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    IEM_MC_END();
}
13376
13377
/**
 * @opcode 0xec
 * @opfltest iopl
 *
 * IN AL,DX: byte port read from the port number in DX.
 */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}
13390
13391
/**
 * @opcode 0xed
 * @opfltest iopl
 *
 * IN eAX,DX: word/dword port read depending on the effective operand size.
 */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
13405
13406
/**
 * @opcode 0xee
 * @opfltest iopl
 *
 * OUT DX,AL: byte port write to the port number in DX.
 */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
13418
13419
/**
 * @opcode 0xef
 * @opfltest iopl
 *
 * OUT DX,eAX: word/dword port write depending on the effective operand size.
 */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
13432
13433
/**
 * @opcode 0xf0
 *
 * LOCK prefix: records the prefix and recursively decodes the next opcode
 * byte through the one-byte dispatch table.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
13445
13446
13447/**
13448 * @opcode 0xf1
13449 */
13450FNIEMOP_DEF(iemOp_int1)
13451{
13452 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
13453 /** @todo Does not generate \#UD on 286, or so they say... Was allegedly a
13454 * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
13455 * LOADALL memo. Needs some testing. */
13456 IEMOP_HLP_MIN_386();
13457 /** @todo testcase! */
13458 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
13459 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
13460 iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
13461}
13462
13463
13464/**
13465 * @opcode 0xf2
13466 */
13467FNIEMOP_DEF(iemOp_repne)
13468{
13469 /* This overrides any previous REPE prefix. */
13470 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
13471 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
13472 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
13473
13474 /* For the 4 entry opcode tables, REPNZ overrides any previous
13475 REPZ and operand size prefixes. */
13476 pVCpu->iem.s.idxPrefix = 3;
13477
13478 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
13479 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
13480}
13481
13482
13483/**
13484 * @opcode 0xf3
13485 */
13486FNIEMOP_DEF(iemOp_repe)
13487{
13488 /* This overrides any previous REPNE prefix. */
13489 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
13490 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
13491 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
13492
13493 /* For the 4 entry opcode tables, REPNZ overrides any previous
13494 REPNZ and operand size prefixes. */
13495 pVCpu->iem.s.idxPrefix = 2;
13496
13497 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
13498 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
13499}
13500
13501
13502/**
13503 * @opcode 0xf4
13504 */
13505FNIEMOP_DEF(iemOp_hlt)
13506{
13507 IEMOP_MNEMONIC(hlt, "hlt");
13508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13509 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_hlt);
13510}
13511
13512
13513/**
13514 * @opcode 0xf5
13515 * @opflmodify cf
13516 */
13517FNIEMOP_DEF(iemOp_cmc)
13518{
13519 IEMOP_MNEMONIC(cmc, "cmc");
13520 IEM_MC_BEGIN(0, 0, 0, 0);
13521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13522 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
13523 IEM_MC_ADVANCE_RIP_AND_FINISH();
13524 IEM_MC_END();
13525}
13526
13527
13528/**
13529 * Body for of 'inc/dec/not/neg Eb'.
13530 */
13531#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
13532 if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
13533 { \
13534 /* register access */ \
13535 IEM_MC_BEGIN(2, 0, 0, 0); \
13536 IEMOP_HLP_DONE_DECODING(); \
13537 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
13538 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13539 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
13540 IEM_MC_REF_EFLAGS(pEFlags); \
13541 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
13542 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13543 IEM_MC_END(); \
13544 } \
13545 else \
13546 { \
13547 /* memory access. */ \
13548 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
13549 { \
13550 IEM_MC_BEGIN(2, 2, 0, 0); \
13551 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
13552 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13553 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13554 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13555 \
13556 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
13557 IEMOP_HLP_DONE_DECODING(); \
13558 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13559 IEM_MC_FETCH_EFLAGS(EFlags); \
13560 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
13561 \
13562 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13563 IEM_MC_COMMIT_EFLAGS(EFlags); \
13564 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13565 IEM_MC_END(); \
13566 } \
13567 else \
13568 { \
13569 IEM_MC_BEGIN(2, 2, 0, 0); \
13570 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
13571 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13572 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13573 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13574 \
13575 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
13576 IEMOP_HLP_DONE_DECODING(); \
13577 IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13578 IEM_MC_FETCH_EFLAGS(EFlags); \
13579 IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
13580 \
13581 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
13582 IEM_MC_COMMIT_EFLAGS(EFlags); \
13583 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13584 IEM_MC_END(); \
13585 } \
13586 } \
13587 (void)0
13588
13589
13590/**
13591 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
13592 */
13593#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
13594 if (IEM_IS_MODRM_REG_MODE(bRm)) \
13595 { \
13596 /* \
13597 * Register target \
13598 */ \
13599 switch (pVCpu->iem.s.enmEffOpSize) \
13600 { \
13601 case IEMMODE_16BIT: \
13602 IEM_MC_BEGIN(2, 0, 0, 0); \
13603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13604 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
13605 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13606 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13607 IEM_MC_REF_EFLAGS(pEFlags); \
13608 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
13609 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13610 IEM_MC_END(); \
13611 break; \
13612 \
13613 case IEMMODE_32BIT: \
13614 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0); \
13615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13616 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
13617 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13618 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13619 IEM_MC_REF_EFLAGS(pEFlags); \
13620 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
13621 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
13622 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13623 IEM_MC_END(); \
13624 break; \
13625 \
13626 case IEMMODE_64BIT: \
13627 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0); \
13628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13629 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
13630 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13631 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13632 IEM_MC_REF_EFLAGS(pEFlags); \
13633 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
13634 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13635 IEM_MC_END(); \
13636 break; \
13637 \
13638 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
13639 } \
13640 } \
13641 else \
13642 { \
13643 /* \
13644 * Memory target. \
13645 */ \
13646 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
13647 { \
13648 switch (pVCpu->iem.s.enmEffOpSize) \
13649 { \
13650 case IEMMODE_16BIT: \
13651 IEM_MC_BEGIN(2, 3, 0, 0); \
13652 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
13653 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13654 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13655 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13656 \
13657 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13658 IEMOP_HLP_DONE_DECODING(); \
13659 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13660 IEM_MC_FETCH_EFLAGS(EFlags); \
13661 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
13662 \
13663 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13664 IEM_MC_COMMIT_EFLAGS(EFlags); \
13665 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13666 IEM_MC_END(); \
13667 break; \
13668 \
13669 case IEMMODE_32BIT: \
13670 IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
13671 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
13672 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13673 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13674 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13675 \
13676 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13677 IEMOP_HLP_DONE_DECODING(); \
13678 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13679 IEM_MC_FETCH_EFLAGS(EFlags); \
13680 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
13681 \
13682 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13683 IEM_MC_COMMIT_EFLAGS(EFlags); \
13684 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13685 IEM_MC_END(); \
13686 break; \
13687 \
13688 case IEMMODE_64BIT: \
13689 IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
13690 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
13691 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13692 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13693 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13694 \
13695 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13696 IEMOP_HLP_DONE_DECODING(); \
13697 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13698 IEM_MC_FETCH_EFLAGS(EFlags); \
13699 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
13700 \
13701 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13702 IEM_MC_COMMIT_EFLAGS(EFlags); \
13703 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13704 IEM_MC_END(); \
13705 break; \
13706 \
13707 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
13708 } \
13709 } \
13710 else \
13711 { \
13712 (void)0
13713
/**
 * LOCK'ed memory-target continuation of IEMOP_BODY_UNARY_Ev.
 *
 * Supplies the atomic workers for all three operand sizes and closes the
 * braces left open by IEMOP_BODY_UNARY_Ev; the two macros must always be
 * used as a pair.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
13779
13780
13781/**
13782 * @opmaps grp3_f6
13783 * @opcode /0
13784 * @opflclass logical
13785 * @todo also /1
13786 */
13787FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
13788{
13789 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
13790 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
13791
13792 if (IEM_IS_MODRM_REG_MODE(bRm))
13793 {
13794 /* register access */
13795 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13796 IEM_MC_BEGIN(3, 0, 0, 0);
13797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13798 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13799 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
13800 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13801 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
13802 IEM_MC_REF_EFLAGS(pEFlags);
13803 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
13804 IEM_MC_ADVANCE_RIP_AND_FINISH();
13805 IEM_MC_END();
13806 }
13807 else
13808 {
13809 /* memory access. */
13810 IEM_MC_BEGIN(3, 3, 0, 0);
13811 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13812 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13813
13814 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13816
13817 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
13818 IEM_MC_ARG(uint8_t const *, pu8Dst, 0);
13819 IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13820
13821 IEM_MC_ARG_CONST(uint8_t, u8Src, u8Imm, 1);
13822 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
13823 IEM_MC_FETCH_EFLAGS(EFlags);
13824 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
13825
13826 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
13827 IEM_MC_COMMIT_EFLAGS(EFlags);
13828 IEM_MC_ADVANCE_RIP_AND_FINISH();
13829 IEM_MC_END();
13830 }
13831}
13832
13833
/**
 * Opcode 0xf6 /4, /5, /6 and /7.
 *
 * Common worker for the byte-sized multiply/divide group: the worker
 * (pfnU8) reads/writes AX (referenced via X86_GREG_xAX) and takes the r/m8
 * operand by value.  A non-zero worker return code raises \#DE.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEM_MC_BEGIN(3, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();    /* worker signalled overflow / divide by zero */
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2, 0, 0);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();    /* worker signalled overflow / divide by zero */
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
13884
13885
/**
 * Opcode 0xf7 /4, /5, /6 and /7.
 *
 * Common worker for the word/dword/qword multiply/divide group: picks the
 * size-specific worker from pImpl, which reads/writes xAX and xDX and takes
 * the r/m operand by value.  A non-zero worker return code raises \#DE.
 * In the 32-bit case the high halves of RAX/RDX are explicitly cleared on
 * success.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit writes zero bits 63:32 of RAX/RDX. */
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2, 0, 0);
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit writes zero bits 63:32 of RAX/RDX. */
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14056
14057
14058/**
14059 * @opmaps grp3_f6
14060 * @opcode /2
14061 * @opflclass unchanged
14062 */
14063FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
14064{
14065/** @todo does not modify EFLAGS. */
14066 IEMOP_MNEMONIC(not_Eb, "not Eb");
14067 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
14068}
14069
14070
14071/**
14072 * @opmaps grp3_f6
14073 * @opcode /3
14074 * @opflclass arithmetic
14075 */
14076FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
14077{
14078 IEMOP_MNEMONIC(net_Eb, "neg Eb");
14079 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
14080}
14081
14082
14083/**
14084 * @opcode 0xf6
14085 */
14086FNIEMOP_DEF(iemOp_Grp3_Eb)
14087{
14088 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14089 switch (IEM_GET_MODRM_REG_8(bRm))
14090 {
14091 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
14092 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
14093 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
14094 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
14095 case 4:
14096 /**
14097 * @opdone
14098 * @opmaps grp3_f6
14099 * @opcode /4
14100 * @opflclass multiply
14101 */
14102 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
14103 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14104 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
14105 case 5:
14106 /**
14107 * @opdone
14108 * @opmaps grp3_f6
14109 * @opcode /5
14110 * @opflclass multiply
14111 */
14112 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
14113 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14114 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
14115 case 6:
14116 /**
14117 * @opdone
14118 * @opmaps grp3_f6
14119 * @opcode /6
14120 * @opflclass division
14121 */
14122 IEMOP_MNEMONIC(div_Eb, "div Eb");
14123 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14124 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
14125 case 7:
14126 /**
14127 * @opdone
14128 * @opmaps grp3_f6
14129 * @opcode /7
14130 * @opflclass division
14131 */
14132 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
14133 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14134 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
14135 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14136 }
14137}
14138
14139
14140/**
14141 * @opmaps grp3_f7
14142 * @opcode /0
14143 * @opflclass logical
14144 */
14145FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
14146{
14147 IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
14148 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
14149
14150 if (IEM_IS_MODRM_REG_MODE(bRm))
14151 {
14152 /* register access */
14153 switch (pVCpu->iem.s.enmEffOpSize)
14154 {
14155 case IEMMODE_16BIT:
14156 IEM_MC_BEGIN(3, 0, 0, 0);
14157 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
14158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14159 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
14160 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
14161 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14162 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
14163 IEM_MC_REF_EFLAGS(pEFlags);
14164 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
14165 IEM_MC_ADVANCE_RIP_AND_FINISH();
14166 IEM_MC_END();
14167 break;
14168
14169 case IEMMODE_32BIT:
14170 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
14171 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
14172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14173 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
14174 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
14175 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14176 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
14177 IEM_MC_REF_EFLAGS(pEFlags);
14178 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
14179 /* No clearing the high dword here - test doesn't write back the result. */
14180 IEM_MC_ADVANCE_RIP_AND_FINISH();
14181 IEM_MC_END();
14182 break;
14183
14184 case IEMMODE_64BIT:
14185 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
14186 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
14187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14188 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
14189 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
14190 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14191 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
14192 IEM_MC_REF_EFLAGS(pEFlags);
14193 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
14194 IEM_MC_ADVANCE_RIP_AND_FINISH();
14195 IEM_MC_END();
14196 break;
14197
14198 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14199 }
14200 }
14201 else
14202 {
14203 /* memory access. */
14204 switch (pVCpu->iem.s.enmEffOpSize)
14205 {
14206 case IEMMODE_16BIT:
14207 IEM_MC_BEGIN(3, 3, 0, 0);
14208 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14209 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
14210
14211 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
14212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14213
14214 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
14215 IEM_MC_ARG(uint16_t const *, pu16Dst, 0);
14216 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14217
14218 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
14219 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
14220 IEM_MC_FETCH_EFLAGS(EFlags);
14221 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
14222
14223 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
14224 IEM_MC_COMMIT_EFLAGS(EFlags);
14225 IEM_MC_ADVANCE_RIP_AND_FINISH();
14226 IEM_MC_END();
14227 break;
14228
14229 case IEMMODE_32BIT:
14230 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
14231 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14232 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
14233
14234 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
14235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14236
14237 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
14238 IEM_MC_ARG(uint32_t const *, pu32Dst, 0);
14239 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14240
14241 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
14242 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
14243 IEM_MC_FETCH_EFLAGS(EFlags);
14244 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
14245
14246 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
14247 IEM_MC_COMMIT_EFLAGS(EFlags);
14248 IEM_MC_ADVANCE_RIP_AND_FINISH();
14249 IEM_MC_END();
14250 break;
14251
14252 case IEMMODE_64BIT:
14253 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
14254 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14255 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
14256
14257 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
14258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14259
14260 IEM_MC_ARG(uint64_t const *, pu64Dst, 0);
14261 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
14262 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14263
14264 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1);
14265 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
14266 IEM_MC_FETCH_EFLAGS(EFlags);
14267 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
14268
14269 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
14270 IEM_MC_COMMIT_EFLAGS(EFlags);
14271 IEM_MC_ADVANCE_RIP_AND_FINISH();
14272 IEM_MC_END();
14273 break;
14274
14275 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14276 }
14277 }
14278}
14279
14280
14281/**
14282 * @opmaps grp3_f7
14283 * @opcode /2
14284 * @opflclass unchanged
14285 */
14286FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
14287{
14288/** @todo does not modify EFLAGS */
14289 IEMOP_MNEMONIC(not_Ev, "not Ev");
14290 IEMOP_BODY_UNARY_Ev( iemAImpl_not_u16, iemAImpl_not_u32, iemAImpl_not_u64);
14291 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
14292}
14293
14294
14295/**
14296 * @opmaps grp3_f7
14297 * @opcode /3
14298 * @opflclass arithmetic
14299 */
14300FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
14301{
14302 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
14303 IEMOP_BODY_UNARY_Ev( iemAImpl_neg_u16, iemAImpl_neg_u32, iemAImpl_neg_u64);
14304 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
14305}
14306
14307
14308/**
14309 * @opcode 0xf7
14310 */
14311FNIEMOP_DEF(iemOp_Grp3_Ev)
14312{
14313 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14314 switch (IEM_GET_MODRM_REG_8(bRm))
14315 {
14316 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
14317 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
14318 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
14319 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
14320 case 4:
14321 /**
14322 * @opdone
14323 * @opmaps grp3_f7
14324 * @opcode /4
14325 * @opflclass multiply
14326 */
14327 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
14328 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14329 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
14330 case 5:
14331 /**
14332 * @opdone
14333 * @opmaps grp3_f7
14334 * @opcode /5
14335 * @opflclass multiply
14336 */
14337 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
14338 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14339 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
14340 case 6:
14341 /**
14342 * @opdone
14343 * @opmaps grp3_f7
14344 * @opcode /6
14345 * @opflclass division
14346 */
14347 IEMOP_MNEMONIC(div_Ev, "div Ev");
14348 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14349 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
14350 case 7:
14351 /**
14352 * @opdone
14353 * @opmaps grp3_f7
14354 * @opcode /7
14355 * @opflclass division
14356 */
14357 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
14358 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14359 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
14360 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14361 }
14362}
14363
14364
14365/**
14366 * @opcode 0xf8
14367 * @opflmodify cf
14368 * @opflclear cf
14369 */
14370FNIEMOP_DEF(iemOp_clc)
14371{
14372 IEMOP_MNEMONIC(clc, "clc");
14373 IEM_MC_BEGIN(0, 0, 0, 0);
14374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14375 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
14376 IEM_MC_ADVANCE_RIP_AND_FINISH();
14377 IEM_MC_END();
14378}
14379
14380
14381/**
14382 * @opcode 0xf9
14383 * @opflmodify cf
14384 * @opflset cf
14385 */
14386FNIEMOP_DEF(iemOp_stc)
14387{
14388 IEMOP_MNEMONIC(stc, "stc");
14389 IEM_MC_BEGIN(0, 0, 0, 0);
14390 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14391 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
14392 IEM_MC_ADVANCE_RIP_AND_FINISH();
14393 IEM_MC_END();
14394}
14395
14396
14397/**
14398 * @opcode 0xfa
14399 * @opfltest iopl,vm
14400 * @opflmodify if,vif
14401 */
14402FNIEMOP_DEF(iemOp_cli)
14403{
14404 IEMOP_MNEMONIC(cli, "cli");
14405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14406 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, 0, iemCImpl_cli);
14407}
14408
14409
/**
 * @opcode 0xfb
 * @opfltest iopl,vm
 * @opflmodify if,vif
 */
FNIEMOP_DEF(iemOp_sti)
{
    /* STI - deferred to the C implementation (privilege checks + IF/VIF).
       Note the differences vs. CLI: the IRQ check happens *after* this
       instruction, and IEM_CIMPL_F_INHIBIT_SHADOW models the one-instruction
       interrupt shadow STI establishes. */
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(  IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER
                                | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_INHIBIT_SHADOW, 0, iemCImpl_sti);
}
14422
14423
/**
 * @opcode 0xfc
 * @opflmodify df
 * @opflclear df
 */
FNIEMOP_DEF(iemOp_cld)
{
    /* CLD - clear the direction flag; no other EFLAGS bits are touched. */
    IEMOP_MNEMONIC(cld, "cld");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* a LOCK prefix is invalid here */
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
14438
14439
/**
 * @opcode 0xfd
 * @opflmodify df
 * @opflset df
 */
FNIEMOP_DEF(iemOp_std)
{
    /* STD - set the direction flag; no other EFLAGS bits are touched. */
    IEMOP_MNEMONIC(std, "std");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* a LOCK prefix is invalid here */
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
14454
14455
/**
 * @opmaps grp4
 * @opcode /0
 * @opflclass incdec
 */
FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
{
    /* INC Eb - byte increment.  The common unary body emits both the plain
       worker and, judging by its name, the LOCK-prefixed memory variant. */
    IEMOP_MNEMONIC(inc_Eb, "inc Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
}
14466
14467
/**
 * @opmaps grp4
 * @opcode /1
 * @opflclass incdec
 */
FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
{
    /* DEC Eb - byte decrement; same structure as the /0 (inc) sibling. */
    IEMOP_MNEMONIC(dec_Eb, "dec Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
}
14478
14479
14480/**
14481 * @opcode 0xfe
14482 */
14483FNIEMOP_DEF(iemOp_Grp4)
14484{
14485 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14486 switch (IEM_GET_MODRM_REG_8(bRm))
14487 {
14488 case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
14489 case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
14490 default:
14491 /** @todo is the eff-addr decoded? */
14492 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
14493 IEMOP_RAISE_INVALID_OPCODE_RET();
14494 }
14495}
14496
/**
 * @opmaps grp5
 * @opcode /0
 * @opflclass incdec
 */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    /* INC Ev - 16/32/64-bit increment.  The first body macro covers the
       plain forms; the _LOCKED variant the LOCK-prefixed memory forms. */
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_inc_u16, iemAImpl_inc_u32, iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
14508
14509
/**
 * @opmaps grp5
 * @opcode /1
 * @opflclass incdec
 */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    /* DEC Ev - 16/32/64-bit decrement; mirrors the /0 (inc) sibling. */
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_dec_u16, iemAImpl_dec_u32, iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
14521
14522
/**
 * Opcode 0xff /2 - calln Ev, near indirect call.
 *
 * The new RIP is fetched from a register or from memory according to the
 * effective operand size, then the branch (including the return-address
 * push, per IEM_CIMPL_F_BRANCH_STACK) is deferred to iemCImpl_call_NN.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    /* In 64-bit mode the operand size defaults to 64-bit and intel CPUs
       ignore the operand-size prefix (per the helper macro's name). */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1, 0, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14609
/**
 * Common body for grp5 far branches with a far pointer memory operand (Ep),
 * i.e. 0xff /3 (callf) and 0xff /5 (jmpf).
 *
 * Loads the 16-bit selector and the 16/32/64-bit offset from memory and
 * defers the branching to @a a_fnCImpl.  Register operands are invalid and
 * raise \#UD.
 *
 * @param   a_bRm           The ModR/M byte.
 * @param   a_fnCImpl       The C implementation performing the far branch.
 * @param   a_fCImplExtra   Additional IEM_CIMPL_F_XXX flags to pass along
 *                          (e.g. IEM_CIMPL_F_BRANCH_STACK for callf).
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl, a_fCImplExtra) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1, 0, 0); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint16_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_16BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); /* selector follows the offset */ \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint32_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_32BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint64_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_64BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE /* no gates */, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
14678
14679
/**
 * Opcode 0xff /3 - callf Ep, far indirect call.
 *
 * Shares its body with jmpf (0xff /5); the extra IEM_CIMPL_F_BRANCH_STACK
 * flag distinguishes the call (stack-writing) variant.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf, IEM_CIMPL_F_BRANCH_STACK);
}
14689
14690
/**
 * Opcode 0xff /4 - jmpn Ev, near indirect jump.
 *
 * Unlike calln (0xff /2) no stack is involved, so the new RIP can be set
 * directly in microcode without deferring to a C implementation.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    /* In 64-bit mode the operand size defaults to 64-bit and intel CPUs
       ignore the operand-size prefix (per the helper macro's name). */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14777
14778
/**
 * Opcode 0xff /5 - jmpf Ep, far indirect jump.
 *
 * Shares its body with callf (0xff /3); no extra CIMPL flags since a far
 * jump does not touch the stack.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp, 0);
}
14788
14789
/**
 * Opcode 0xff /6 - push Ev.
 *
 * Register operands go through the common push-GReg worker; memory operands
 * are fetched and pushed here, sized by the effective operand size.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode: 64-bit operand size by default */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(uint16_t,  u16Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            /* 32-bit pushes are not encodable in 64-bit mode. */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEM_MC_LOCAL(uint32_t,  u32Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t,  u64Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14845
14846
14847/**
14848 * @opcode 0xff
14849 */
14850FNIEMOP_DEF(iemOp_Grp5)
14851{
14852 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14853 switch (IEM_GET_MODRM_REG_8(bRm))
14854 {
14855 case 0:
14856 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
14857 case 1:
14858 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
14859 case 2:
14860 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
14861 case 3:
14862 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
14863 case 4:
14864 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
14865 case 5:
14866 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
14867 case 6:
14868 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
14869 case 7:
14870 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
14871 IEMOP_RAISE_INVALID_OPCODE_RET();
14872 }
14873 AssertFailedReturn(VERR_IEM_IPE_3);
14874}
14875
14876
14877
/**
 * The one-byte opcode decoder map, indexed directly by the opcode byte
 * (0x00..0xff).  Forward declared (extern) at the top of this file; the
 * entry order is load-bearing and must match the opcode values in the
 * row comments.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex3,  iemOp_lds_Gv_Mp__vex2,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_in_eAX_DX,        iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
14945
14946
14947/** @} */
14948
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette