VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 103675

Last change on this file since 103675 was 103675, checked in by vboxsync, 14 months ago

VMM/IEM: Implemented iemNativeEmit_sub_r_r_efl and enabled it for both hosts. bugref:10376

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 573.7 KB
Line 
/* $Id: IEMAllInstOneByte.cpp.h 103675 2024-03-05 02:10:37Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */

28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
59/**
60 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
61 * memory/register as the destination.
62 */
63#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8, a_fnLockedU8, a_EmitterBasename, a_fRegRegNativeArchs, a_fMemRegNativeArchs) \
64 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
65 \
66 /* \
67 * If rm is denoting a register, no more instruction bytes. \
68 */ \
69 if (IEM_IS_MODRM_REG_MODE(bRm)) \
70 { \
71 IEM_MC_BEGIN(3, 0, 0, 0); \
72 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
73 IEM_MC_ARG(uint8_t, u8Src, 1); \
74 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
75 IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
76 IEM_MC_LOCAL(uint8_t, u8Dst); \
77 IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
78 /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
79 IEM_MC_LOCAL(uint32_t, uEFlags); \
80 IEM_MC_FETCH_EFLAGS(uEFlags); \
81 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
82 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Dst); \
83 IEM_MC_COMMIT_EFLAGS(uEFlags); \
84 } IEM_MC_NATIVE_ELSE() { \
85 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
86 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
87 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
88 IEM_MC_REF_EFLAGS(pEFlags); \
89 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
90 } IEM_MC_NATIVE_ENDIF(); \
91 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
92 IEM_MC_END(); \
93 } \
94 else \
95 { \
96 /* \
97 * We're accessing memory. \
98 * Note! We're putting the eflags on the stack here so we can commit them \
99 * after the memory. \
100 */ \
101 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
102 { \
103 IEM_MC_BEGIN(3, 3, 0, 0); \
104 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
105 IEM_MC_ARG(uint8_t, u8Src, 1); \
106 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
107 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
108 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
109 \
110 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
111 IEMOP_HLP_DONE_DECODING(); \
112 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
113 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
114 IEM_MC_FETCH_EFLAGS(EFlags); \
115 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
116 \
117 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
118 IEM_MC_COMMIT_EFLAGS(EFlags); \
119 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
120 IEM_MC_END(); \
121 } \
122 else \
123 { \
124 IEM_MC_BEGIN(3, 3, 0, 0); \
125 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
126 IEM_MC_ARG(uint8_t, u8Src, 1); \
127 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
128 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
129 IEM_MC_LOCAL(uint8_t, bMapInfoDst); \
130 \
131 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
132 IEMOP_HLP_DONE_DECODING(); \
133 IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bMapInfoDst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
134 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
135 IEM_MC_FETCH_EFLAGS(EFlags); \
136 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
137 \
138 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bMapInfoDst); \
139 IEM_MC_COMMIT_EFLAGS(EFlags); \
140 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
141 IEM_MC_END(); \
142 } \
143 } \
144 (void)0
145
146/**
147 * Body for instructions like TEST & CMP with a byte memory/registers as
148 * operands.
149 */
150#define IEMOP_BODY_BINARY_rm_r8_RO(a_bRm, a_fnNormalU8, a_EmitterBasename, a_fNativeArchs) \
151 /* \
152 * If rm is denoting a register, no more instruction bytes. \
153 */ \
154 if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
155 { \
156 IEM_MC_BEGIN(3, 0, 0, 0); \
157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
158 IEM_MC_ARG(uint8_t, u8Src, 1); \
159 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
160 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
161 IEM_MC_LOCAL(uint8_t, u8Dst); \
162 IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
163 /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
164 IEM_MC_LOCAL(uint32_t, uEFlags); \
165 IEM_MC_FETCH_EFLAGS(uEFlags); \
166 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
167 IEM_MC_COMMIT_EFLAGS(uEFlags); \
168 } IEM_MC_NATIVE_ELSE() { \
169 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
170 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
171 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
172 IEM_MC_REF_EFLAGS(pEFlags); \
173 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
174 } IEM_MC_NATIVE_ENDIF(); \
175 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
176 IEM_MC_END(); \
177 } \
178 else \
179 { \
180 /* \
181 * We're accessing memory. \
182 * Note! We're putting the eflags on the stack here so we can commit them \
183 * after the memory. \
184 */ \
185 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
186 { \
187 IEM_MC_BEGIN(3, 3, 0, 0); \
188 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
189 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
190 IEMOP_HLP_DONE_DECODING(); \
191 IEM_MC_NATIVE_IF(0) { \
192 IEM_MC_LOCAL(uint8_t, u8Dst); \
193 IEM_MC_FETCH_MEM_U8(u8Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
194 IEM_MC_LOCAL(uint8_t, u8SrcEmit); \
195 IEM_MC_FETCH_GREG_U8(u8SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
196 /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
197 IEM_MC_LOCAL(uint32_t, uEFlags); \
198 IEM_MC_FETCH_EFLAGS(uEFlags); \
199 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u8Dst, u8SrcEmit, uEFlags, 8); \
200 IEM_MC_COMMIT_EFLAGS(uEFlags); \
201 } IEM_MC_NATIVE_ELSE() { \
202 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
203 IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
204 IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
205 IEM_MC_ARG(uint8_t, u8Src, 1); \
206 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
207 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
208 IEM_MC_FETCH_EFLAGS(EFlags); \
209 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
210 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
211 IEM_MC_COMMIT_EFLAGS(EFlags); \
212 } IEM_MC_NATIVE_ENDIF(); \
213 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
214 IEM_MC_END(); \
215 } \
216 else \
217 { \
218 /** @todo we should probably decode the address first. */ \
219 IEMOP_HLP_DONE_DECODING(); \
220 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
221 } \
222 } \
223 (void)0
224
225/**
226 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
227 * destination.
228 */
229#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8, a_EmitterBasename, a_fNativeArchs) \
230 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
231 \
232 /* \
233 * If rm is denoting a register, no more instruction bytes. \
234 */ \
235 if (IEM_IS_MODRM_REG_MODE(bRm)) \
236 { \
237 IEM_MC_BEGIN(3, 0, 0, 0); \
238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
239 IEM_MC_ARG(uint8_t, u8Src, 1); \
240 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
241 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
242 IEM_MC_LOCAL(uint8_t, u8Dst); \
243 IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
244 /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
245 IEM_MC_LOCAL(uint32_t, uEFlags); \
246 IEM_MC_FETCH_EFLAGS(uEFlags); \
247 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
248 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst); \
249 IEM_MC_COMMIT_EFLAGS(uEFlags); \
250 } IEM_MC_NATIVE_ELSE() { \
251 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
252 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
253 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
254 IEM_MC_REF_EFLAGS(pEFlags); \
255 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
256 } IEM_MC_NATIVE_ENDIF(); \
257 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
258 IEM_MC_END(); \
259 } \
260 else \
261 { \
262 /* \
263 * We're accessing memory. \
264 */ \
265 IEM_MC_BEGIN(3, 1, 0, 0); \
266 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
267 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
269 IEM_MC_ARG(uint8_t, u8Src, 1); \
270 IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
271 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
272 IEM_MC_LOCAL(uint8_t, u8Dst); \
273 IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
274 /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
275 IEM_MC_LOCAL(uint32_t, uEFlags); \
276 IEM_MC_FETCH_EFLAGS(uEFlags); \
277 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
278 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst); \
279 IEM_MC_COMMIT_EFLAGS(uEFlags); \
280 } IEM_MC_NATIVE_ELSE() { \
281 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
282 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
283 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
284 IEM_MC_REF_EFLAGS(pEFlags); \
285 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
286 } IEM_MC_NATIVE_ENDIF(); \
287 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
288 IEM_MC_END(); \
289 } \
290 (void)0
291
292
293/**
294 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
295 * memory/register as the destination.
296 */
297#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_EmitterBasename, a_fRegRegNativeArchs, a_fMemRegNativeArchs) \
298 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
299 \
300 /* \
301 * If rm is denoting a register, no more instruction bytes. \
302 */ \
303 if (IEM_IS_MODRM_REG_MODE(bRm)) \
304 { \
305 switch (pVCpu->iem.s.enmEffOpSize) \
306 { \
307 case IEMMODE_16BIT: \
308 IEM_MC_BEGIN(3, 0, 0, 0); \
309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
310 IEM_MC_ARG(uint16_t, u16Src, 1); \
311 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
312 IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
313 IEM_MC_LOCAL(uint16_t, u16Dst); \
314 IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
315 /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
316 IEM_MC_LOCAL(uint32_t, uEFlags); \
317 IEM_MC_FETCH_EFLAGS(uEFlags); \
318 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u16Dst, u16Src, uEFlags, 16); \
319 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Dst); \
320 IEM_MC_COMMIT_EFLAGS(uEFlags); \
321 } IEM_MC_NATIVE_ELSE() { \
322 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
323 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
324 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
325 IEM_MC_REF_EFLAGS(pEFlags); \
326 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
327 } IEM_MC_NATIVE_ENDIF(); \
328 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
329 IEM_MC_END(); \
330 break; \
331 \
332 case IEMMODE_32BIT: \
333 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
335 IEM_MC_ARG(uint32_t, u32Src, 1); \
336 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
337 IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
338 IEM_MC_LOCAL(uint32_t, u32Dst); \
339 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
340 /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
341 IEM_MC_LOCAL(uint32_t, uEFlags); \
342 IEM_MC_FETCH_EFLAGS(uEFlags); \
343 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u32Dst, u32Src, uEFlags, 32); \
344 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst); \
345 IEM_MC_COMMIT_EFLAGS(uEFlags); \
346 } IEM_MC_NATIVE_ELSE() { \
347 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
348 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
349 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
350 IEM_MC_REF_EFLAGS(pEFlags); \
351 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
352 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
353 } IEM_MC_NATIVE_ENDIF(); \
354 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
355 IEM_MC_END(); \
356 break; \
357 \
358 case IEMMODE_64BIT: \
359 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
361 IEM_MC_ARG(uint64_t, u64Src, 1); \
362 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
363 IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
364 IEM_MC_LOCAL(uint64_t, u64Dst); \
365 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
366 /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
367 IEM_MC_LOCAL(uint32_t, uEFlags); \
368 IEM_MC_FETCH_EFLAGS(uEFlags); \
369 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u64Dst, u64Src, uEFlags, 64); \
370 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst); \
371 IEM_MC_COMMIT_EFLAGS(uEFlags); \
372 } IEM_MC_NATIVE_ELSE() { \
373 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
374 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
375 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
376 IEM_MC_REF_EFLAGS(pEFlags); \
377 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
378 } IEM_MC_NATIVE_ENDIF(); \
379 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
380 IEM_MC_END(); \
381 break; \
382 \
383 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
384 } \
385 } \
386 else \
387 { \
388 /* \
389 * We're accessing memory. \
390 * Note! We're putting the eflags on the stack here so we can commit them \
391 * after the memory. \
392 */ \
393 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
394 { \
395 switch (pVCpu->iem.s.enmEffOpSize) \
396 { \
397 case IEMMODE_16BIT: \
398 IEM_MC_BEGIN(3, 3, 0, 0); \
399 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
400 IEM_MC_ARG(uint16_t, u16Src, 1); \
401 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
402 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
403 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
404 \
405 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
406 IEMOP_HLP_DONE_DECODING(); \
407 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
408 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
409 IEM_MC_FETCH_EFLAGS(EFlags); \
410 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
411 \
412 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
413 IEM_MC_COMMIT_EFLAGS(EFlags); \
414 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
415 IEM_MC_END(); \
416 break; \
417 \
418 case IEMMODE_32BIT: \
419 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
420 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
421 IEM_MC_ARG(uint32_t, u32Src, 1); \
422 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
423 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
424 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
425 \
426 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
427 IEMOP_HLP_DONE_DECODING(); \
428 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
429 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
430 IEM_MC_FETCH_EFLAGS(EFlags); \
431 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
432 \
433 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
434 IEM_MC_COMMIT_EFLAGS(EFlags); \
435 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
436 IEM_MC_END(); \
437 break; \
438 \
439 case IEMMODE_64BIT: \
440 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
441 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
442 IEM_MC_ARG(uint64_t, u64Src, 1); \
443 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
444 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
445 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
446 \
447 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
448 IEMOP_HLP_DONE_DECODING(); \
449 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
450 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
451 IEM_MC_FETCH_EFLAGS(EFlags); \
452 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
453 \
454 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
455 IEM_MC_COMMIT_EFLAGS(EFlags); \
456 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
457 IEM_MC_END(); \
458 break; \
459 \
460 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
461 } \
462 } \
463 else \
464 { \
465 (void)0
466/* Separate macro to work around parsing issue in IEMAllInstPython.py */
467#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
468 switch (pVCpu->iem.s.enmEffOpSize) \
469 { \
470 case IEMMODE_16BIT: \
471 IEM_MC_BEGIN(3, 3, 0, 0); \
472 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
473 IEM_MC_ARG(uint16_t, u16Src, 1); \
474 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
475 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
476 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
477 \
478 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
479 IEMOP_HLP_DONE_DECODING(); \
480 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
481 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
482 IEM_MC_FETCH_EFLAGS(EFlags); \
483 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
484 \
485 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
486 IEM_MC_COMMIT_EFLAGS(EFlags); \
487 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
488 IEM_MC_END(); \
489 break; \
490 \
491 case IEMMODE_32BIT: \
492 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
493 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
494 IEM_MC_ARG(uint32_t, u32Src, 1); \
495 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
496 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
497 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
498 \
499 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
500 IEMOP_HLP_DONE_DECODING(); \
501 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
502 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
503 IEM_MC_FETCH_EFLAGS(EFlags); \
504 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
505 \
506 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo /* CMP,TEST */); \
507 IEM_MC_COMMIT_EFLAGS(EFlags); \
508 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
509 IEM_MC_END(); \
510 break; \
511 \
512 case IEMMODE_64BIT: \
513 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
514 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
515 IEM_MC_ARG(uint64_t, u64Src, 1); \
516 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
517 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
518 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
519 \
520 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
521 IEMOP_HLP_DONE_DECODING(); \
522 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
523 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
524 IEM_MC_FETCH_EFLAGS(EFlags); \
525 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
526 \
527 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
528 IEM_MC_COMMIT_EFLAGS(EFlags); \
529 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
530 IEM_MC_END(); \
531 break; \
532 \
533 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
534 } \
535 } \
536 } \
537 (void)0
538
539/**
540 * Body for read-only word/dword/qword instructions like TEST and CMP with
541 * memory/register as the destination.
542 */
543#define IEMOP_BODY_BINARY_rm_rv_RO(a_bRm, a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_EmitterBasename, a_fNativeArchs) \
544 /* \
545 * If rm is denoting a register, no more instruction bytes. \
546 */ \
547 if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
548 { \
549 switch (pVCpu->iem.s.enmEffOpSize) \
550 { \
551 case IEMMODE_16BIT: \
552 IEM_MC_BEGIN(3, 0, 0, 0); \
553 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
554 IEM_MC_ARG(uint16_t, u16Src, 1); \
555 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
556 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
557 IEM_MC_LOCAL(uint16_t, u16Dst); \
558 IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
559 /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
560 IEM_MC_LOCAL(uint32_t, uEFlags); \
561 IEM_MC_FETCH_EFLAGS(uEFlags); \
562 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u16Dst, u16Src, uEFlags, 16); \
563 IEM_MC_COMMIT_EFLAGS(uEFlags); \
564 } IEM_MC_NATIVE_ELSE() { \
565 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
566 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
567 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
568 IEM_MC_REF_EFLAGS(pEFlags); \
569 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
570 } IEM_MC_NATIVE_ENDIF(); \
571 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
572 IEM_MC_END(); \
573 break; \
574 \
575 case IEMMODE_32BIT: \
576 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
578 IEM_MC_ARG(uint32_t, u32Src, 1); \
579 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
580 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
581 IEM_MC_LOCAL(uint32_t, u32Dst); \
582 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
583 /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
584 IEM_MC_LOCAL(uint32_t, uEFlags); \
585 IEM_MC_FETCH_EFLAGS(uEFlags); \
586 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u32Dst, u32Src, uEFlags, 32); \
587 IEM_MC_COMMIT_EFLAGS(uEFlags); \
588 } IEM_MC_NATIVE_ELSE() { \
589 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
590 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
591 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
592 IEM_MC_REF_EFLAGS(pEFlags); \
593 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
594 } IEM_MC_NATIVE_ENDIF(); \
595 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
596 IEM_MC_END(); \
597 break; \
598 \
599 case IEMMODE_64BIT: \
600 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
602 IEM_MC_ARG(uint64_t, u64Src, 1); \
603 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
604 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
605 IEM_MC_LOCAL(uint64_t, u64Dst); \
606 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
607 /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
608 IEM_MC_LOCAL(uint32_t, uEFlags); \
609 IEM_MC_FETCH_EFLAGS(uEFlags); \
610 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u64Dst, u64Src, uEFlags, 64); \
611 IEM_MC_COMMIT_EFLAGS(uEFlags); \
612 } IEM_MC_NATIVE_ELSE() { \
613 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
614 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
615 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
616 IEM_MC_REF_EFLAGS(pEFlags); \
617 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
618 } IEM_MC_NATIVE_ENDIF(); \
619 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
620 IEM_MC_END(); \
621 break; \
622 \
623 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
624 } \
625 } \
626 else \
627 { \
628 /* \
629 * We're accessing memory. \
630 * Note! We're putting the eflags on the stack here so we can commit them \
631 * after the memory. \
632 */ \
633 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
634 { \
635 switch (pVCpu->iem.s.enmEffOpSize) \
636 { \
637 case IEMMODE_16BIT: \
638 IEM_MC_BEGIN(3, 3, 0, 0); \
639 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
640 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
641 IEMOP_HLP_DONE_DECODING(); \
642 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
643 IEM_MC_LOCAL(uint16_t, u16Dst); \
644 IEM_MC_FETCH_MEM_U16(u16Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
645 IEM_MC_LOCAL(uint16_t, u16SrcEmit); \
646 IEM_MC_FETCH_GREG_U16(u16SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
647 /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
648 IEM_MC_LOCAL(uint32_t, uEFlags); \
649 IEM_MC_FETCH_EFLAGS(uEFlags); \
650 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u16Dst, u16SrcEmit, uEFlags, 16); \
651 IEM_MC_COMMIT_EFLAGS(uEFlags); \
652 } IEM_MC_NATIVE_ELSE() { \
653 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
654 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
655 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
656 IEM_MC_ARG(uint16_t, u16Src, 1); \
657 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
658 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
659 IEM_MC_FETCH_EFLAGS(EFlags); \
660 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
661 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
662 IEM_MC_COMMIT_EFLAGS(EFlags); \
663 } IEM_MC_NATIVE_ENDIF(); \
664 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
665 IEM_MC_END(); \
666 break; \
667 \
668 case IEMMODE_32BIT: \
669 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
670 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
671 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
672 IEMOP_HLP_DONE_DECODING(); \
673 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
674 IEM_MC_LOCAL(uint32_t, u32Dst); \
675 IEM_MC_FETCH_MEM_U32(u32Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
676 IEM_MC_LOCAL(uint32_t, u32SrcEmit); \
677 IEM_MC_FETCH_GREG_U32(u32SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
678 /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
679 IEM_MC_LOCAL(uint32_t, uEFlags); \
680 IEM_MC_FETCH_EFLAGS(uEFlags); \
681 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u32Dst, u32SrcEmit, uEFlags, 32); \
682 IEM_MC_COMMIT_EFLAGS(uEFlags); \
683 } IEM_MC_NATIVE_ELSE() { \
684 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
685 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
686 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
687 IEM_MC_ARG(uint32_t, u32Src, 1); \
688 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
689 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
690 IEM_MC_FETCH_EFLAGS(EFlags); \
691 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
692 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
693 IEM_MC_COMMIT_EFLAGS(EFlags); \
694 } IEM_MC_NATIVE_ENDIF(); \
695 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
696 IEM_MC_END(); \
697 break; \
698 \
699 case IEMMODE_64BIT: \
700 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
701 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
702 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
703 IEMOP_HLP_DONE_DECODING(); \
704 IEM_MC_NATIVE_IF(a_fNativeArchs) { \
705 IEM_MC_LOCAL(uint64_t, u64Dst); \
706 IEM_MC_FETCH_MEM_U64(u64Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
707 IEM_MC_LOCAL(uint64_t, u64SrcEmit); \
708 IEM_MC_FETCH_GREG_U64(u64SrcEmit, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
709 /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
710 IEM_MC_LOCAL(uint32_t, uEFlags); \
711 IEM_MC_FETCH_EFLAGS(uEFlags); \
712 IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u64Dst, u64SrcEmit, uEFlags, 64); \
713 IEM_MC_COMMIT_EFLAGS(uEFlags); \
714 } IEM_MC_NATIVE_ELSE() { \
715 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
716 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
717 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
718 IEM_MC_ARG(uint64_t, u64Src, 1); \
719 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, a_bRm)); \
720 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
721 IEM_MC_FETCH_EFLAGS(EFlags); \
722 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
723 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
724 IEM_MC_COMMIT_EFLAGS(EFlags); \
725 } IEM_MC_NATIVE_ENDIF(); \
726 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
727 IEM_MC_END(); \
728 break; \
729 \
730 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
731 } \
732 } \
733 else \
734 { \
735 IEMOP_HLP_DONE_DECODING(); \
736 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
737 } \
738 } \
739 (void)0
740
741
742/**
743 * Body for instructions like ADD, AND, OR, ++ with working on AL with
744 * a byte immediate.
745 */
746#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
747 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
748 \
749 IEM_MC_BEGIN(3, 0, 0, 0); \
750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
751 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
752 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1); \
753 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
754 \
755 IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
756 IEM_MC_REF_EFLAGS(pEFlags); \
757 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
758 \
759 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
760 IEM_MC_END()
761
762/**
763 * Body for instructions like ADD, AND, OR, ++ with working on
764 * AX/EAX/RAX with a word/dword immediate.
765 */
766#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
767 switch (pVCpu->iem.s.enmEffOpSize) \
768 { \
769 case IEMMODE_16BIT: \
770 { \
771 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
772 \
773 IEM_MC_BEGIN(3, 0, 0, 0); \
774 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
775 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
776 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1); \
777 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
778 \
779 IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
780 IEM_MC_REF_EFLAGS(pEFlags); \
781 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
782 \
783 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
784 IEM_MC_END(); \
785 } \
786 \
787 case IEMMODE_32BIT: \
788 { \
789 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
790 \
791 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
793 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
794 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1); \
795 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
796 \
797 IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
798 IEM_MC_REF_EFLAGS(pEFlags); \
799 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
800 \
801 if (a_fModifiesDstReg) \
802 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
803 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
804 IEM_MC_END(); \
805 } \
806 \
807 case IEMMODE_64BIT: \
808 { \
809 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
810 \
811 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
813 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
814 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1); \
815 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
816 \
817 IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
818 IEM_MC_REF_EFLAGS(pEFlags); \
819 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
820 \
821 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
822 IEM_MC_END(); \
823 } \
824 \
825 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
826 } \
827 (void)0
828
829
830
/* Instruction specification format - work in progress:  */

833/**
834 * @opcode 0x00
835 * @opmnemonic add
836 * @op1 rm:Eb
837 * @op2 reg:Gb
838 * @opmaps one
839 * @openc ModR/M
840 * @opflclass arithmetic
841 * @ophints harmless ignores_op_sizes
842 * @opstats add_Eb_Gb
843 * @opgroup og_gen_arith_bin
844 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
845 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
846 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
847 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
848 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    /* ADD r/m8, r8 (MR form); LOCK is allowed on the memory variant. */
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_add_u8, iemAImpl_add_u8_locked, add, 0, 0);
}
854
855
856/**
857 * @opcode 0x01
858 * @opgroup og_gen_arith_bin
859 * @opflclass arithmetic
860 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
861 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
862 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
863 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
864 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    /* ADD r/m16/32/64, r16/32/64 (MR form); separate locked workers for the memory variant. */
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64, add, 0, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
871
872
873/**
874 * @opcode 0x02
875 * @opgroup og_gen_arith_bin
876 * @opflclass arithmetic
877 * @opcopytests iemOp_add_Eb_Gb
878 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* ADD r8, r/m8 (RM form) - register destination, so no LOCK variant. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8, add, 0);
}
884
885
886/**
887 * @opcode 0x03
888 * @opgroup og_gen_arith_bin
889 * @opflclass arithmetic
890 * @opcopytests iemOp_add_Ev_Gv
891 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    /* ADD r16/32/64, r/m16/32/64 (RM form) - register destination. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1, 0, add, 0);
}
898
899
900/**
901 * @opcode 0x04
902 * @opgroup og_gen_arith_bin
903 * @opflclass arithmetic
904 * @opcopytests iemOp_add_Eb_Gb
905 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* ADD AL, imm8 - fixed AL destination. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
}
911
912
913/**
914 * @opcode 0x05
915 * @opgroup og_gen_arith_bin
916 * @opflclass arithmetic
917 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
918 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
919 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
920 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
921 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* ADD rAX, Iz - last arg 1: ADD writes rAX (see IEMOP_BODY_BINARY_rAX_Iz). */
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
927
928
929/**
930 * @opcode 0x06
931 * @opgroup og_stack_sreg
932 */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* PUSH ES - invalid in 64-bit mode; defers to the common sreg push worker. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
939
940
941/**
942 * @opcode 0x07
943 * @opgroup og_stack_sreg
944 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* POP ES - invalid in 64-bit mode; defers to iemCImpl_pop_Sreg.  The
       register mask covers RSP plus the ES selector/base/limit/attributes. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES),
                                iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
958
959
960/**
961 * @opcode 0x08
962 * @opgroup og_gen_arith_bin
963 * @opflclass logical
964 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
965 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
966 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
967 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
968 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    /* OR r/m8, r8 (MR form); AF is architecturally undefined for OR.  Has
       native emitters for both AMD64 and ARM64 hosts. */
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_or_u8, iemAImpl_or_u8_locked, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
975
976
/**
978 * @opcode 0x09
979 * @opgroup og_gen_arith_bin
980 * @opflclass logical
981 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
982 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
983 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
984 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
985 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
986 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
987 * @note AF is documented as undefined, but both modern AMD and Intel CPUs clears it.
988 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    /* OR r/m16/32/64, r16/32/64 (MR form); AF is architecturally undefined for OR. */
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
996
997
998/**
999 * @opcode 0x0a
1000 * @opgroup og_gen_arith_bin
1001 * @opflclass logical
1002 * @opcopytests iemOp_or_Eb_Gb
1003 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    /* OR r8, r/m8 (RM form); AF is architecturally undefined for OR. */
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1010
1011
1012/**
1013 * @opcode 0x0b
1014 * @opgroup og_gen_arith_bin
1015 * @opflclass logical
1016 * @opcopytests iemOp_or_Ev_Gv
1017 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    /* OR r16/32/64, r/m16/32/64 (RM form); AF is architecturally undefined for OR. */
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1, 0, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1025
1026
1027/**
1028 * @opcode 0x0c
1029 * @opgroup og_gen_arith_bin
1030 * @opflclass logical
1031 * @opcopytests iemOp_or_Eb_Gb
1032 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    /* OR AL, imm8; AF is architecturally undefined for OR. */
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
}
1039
1040
1041/**
1042 * @opcode 0x0d
1043 * @opgroup og_gen_arith_bin
1044 * @opflclass logical
1045 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
1046 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
1047 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1048 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1049 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1050 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1051 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
1052 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX, Iz; AF is architecturally undefined for OR.  Last arg 1: OR writes rAX. */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
1059
1060
1061/**
1062 * @opcode 0x0e
1063 * @opgroup og_stack_sreg
1064 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS - invalid in 64-bit mode; defers to the common sreg push worker. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
1071
1072
1073/**
1074 * @opcode 0x0f
1075 * @opmnemonic EscTwo0f
1076 * @openc two0f
1077 * @opdisenum OP_2B_ESC
1078 * @ophints harmless
1079 * @opgroup og_escapes
1080 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    /* 0x0f escape: dispatches into the two-byte opcode map on 286+ targets;
       on older target CPUs it decodes as POP CS (8086 behavior). */
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        /* The map has four entries per opcode byte, selected by idxPrefix. */
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_CS),
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
1118
1119/**
1120 * @opcode 0x10
1121 * @opgroup og_gen_arith_bin
1122 * @opflclass arithmetic_carry
1123 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1124 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1125 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1126 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1127 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1128 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC r/m8, r8 (MR form) - add with carry-in; LOCK allowed on the memory variant. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_adc_u8, iemAImpl_adc_u8_locked, adc, 0, 0);
}
1134
1135
1136/**
1137 * @opcode 0x11
1138 * @opgroup og_gen_arith_bin
1139 * @opflclass arithmetic_carry
1140 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1141 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1142 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1143 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1144 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1145 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC r/m16/32/64, r16/32/64 (MR form) - add with carry-in. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64, adc, 0, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
1152
1153
1154/**
1155 * @opcode 0x12
1156 * @opgroup og_gen_arith_bin
1157 * @opflclass arithmetic_carry
1158 * @opcopytests iemOp_adc_Eb_Gb
1159 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC r8, r/m8 (RM form) - register destination, no LOCK variant. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8, adc, 0);
}
1165
1166
1167/**
1168 * @opcode 0x13
1169 * @opgroup og_gen_arith_bin
1170 * @opflclass arithmetic_carry
1171 * @opcopytests iemOp_adc_Ev_Gv
1172 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC r16/32/64, r/m16/32/64 (RM form) - register destination. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1, 0, adc, 0);
}
1179
1180
1181/**
1182 * @opcode 0x14
1183 * @opgroup og_gen_arith_bin
1184 * @opflclass arithmetic_carry
1185 * @opcopytests iemOp_adc_Eb_Gb
1186 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL, imm8 - fixed AL destination. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
1192
1193
1194/**
1195 * @opcode 0x15
1196 * @opgroup og_gen_arith_bin
1197 * @opflclass arithmetic_carry
1198 * @opcopytests iemOp_adc_Ev_Gv
1199 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX, Iz - last arg 1: ADC writes rAX. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1205
1206
1207/**
1208 * @opcode 0x16
1209 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS - invalid in 64-bit mode; defers to the common sreg push worker. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1216
1217
1218/**
1219 * @opcode 0x17
1220 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS - invalid in 64-bit mode; IEM_CIMPL_F_INHIBIT_SHADOW reflects the
       interrupt shadow following a POP SS.  The register mask covers RSP plus
       the SS selector/base/limit/attributes. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_INHIBIT_SHADOW,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_SS),
                                iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1234
1235
1236/**
1237 * @opcode 0x18
1238 * @opgroup og_gen_arith_bin
1239 * @opflclass arithmetic_carry
1240 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB r/m8, r8 (MR form) - subtract with borrow; LOCK allowed on the memory variant. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_sbb_u8, iemAImpl_sbb_u8_locked, sbb, 0, 0);
}
1246
1247
1248/**
1249 * @opcode 0x19
1250 * @opgroup og_gen_arith_bin
1251 * @opflclass arithmetic_carry
1252 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB r/m16/32/64, r16/32/64 (MR form) - subtract with borrow. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64, sbb, 0, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
1259
1260
1261/**
1262 * @opcode 0x1a
1263 * @opgroup og_gen_arith_bin
1264 * @opflclass arithmetic_carry
1265 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB r8, r/m8 (RM form) - register destination, no LOCK variant. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8, sbb, 0);
}
1271
1272
1273/**
1274 * @opcode 0x1b
1275 * @opgroup og_gen_arith_bin
1276 * @opflclass arithmetic_carry
1277 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB r16/32/64, r/m16/32/64 (RM form) - register destination. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1, 0, sbb, 0);
}
1284
1285
1286/**
1287 * @opcode 0x1c
1288 * @opgroup og_gen_arith_bin
1289 * @opflclass arithmetic_carry
1290 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL, imm8 - fixed AL destination. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
1296
1297
1298/**
1299 * @opcode 0x1d
1300 * @opgroup og_gen_arith_bin
1301 * @opflclass arithmetic_carry
1302 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX, Iz - last arg 1: SBB writes rAX. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1308
1309
1310/**
1311 * @opcode 0x1e
1312 * @opgroup og_stack_sreg
1313 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS - invalid in 64-bit mode; defers to the common sreg push worker. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1320
1321
1322/**
1323 * @opcode 0x1f
1324 * @opgroup og_stack_sreg
1325 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS - invalid in 64-bit mode; defers to iemCImpl_pop_Sreg.  The
       register mask covers RSP plus the DS selector/base/limit/attributes. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS),
                                iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1339
1340
1341/**
1342 * @opcode 0x20
1343 * @opgroup og_gen_arith_bin
1344 * @opflclass logical
1345 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND r/m8, r8 (MR form); AF is architecturally undefined for AND. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_and_u8, iemAImpl_and_u8_locked, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
1352
1353
1354/**
1355 * @opcode 0x21
1356 * @opgroup og_gen_arith_bin
1357 * @opflclass logical
1358 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND r/m16/32/64, r16/32/64 (MR form); AF is architecturally undefined for AND. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1366
1367
1368/**
1369 * @opcode 0x22
1370 * @opgroup og_gen_arith_bin
1371 * @opflclass logical
1372 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND r8, r/m8 (RM form); AF is architecturally undefined for AND. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1379
1380
1381/**
1382 * @opcode 0x23
1383 * @opgroup og_gen_arith_bin
1384 * @opflclass logical
1385 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND r16/32/64, r/m16/32/64 (RM form); AF is architecturally undefined for AND. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1, 0, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1393
1394
1395/**
1396 * @opcode 0x24
1397 * @opgroup og_gen_arith_bin
1398 * @opflclass logical
1399 */
1400FNIEMOP_DEF(iemOp_and_Al_Ib)
1401{
1402 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1403 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1404 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1405}
1406
1407
1408/**
1409 * @opcode 0x25
1410 * @opgroup og_gen_arith_bin
1411 * @opflclass logical
1412 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX, Iz; AF is architecturally undefined for AND.  Last arg 1: AND writes rAX. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1419
1420
1421/**
1422 * @opcode 0x26
1423 * @opmnemonic SEG
1424 * @op1 ES
1425 * @opgroup og_prefix
1426 * @openc prefix
1427 * @opdisenum OP_SEG
1428 * @ophints harmless
1429 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record it and decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg    = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1439
1440
1441/**
1442 * @opcode 0x27
1443 * @opfltest af,cf
1444 * @opflmodify cf,pf,af,zf,sf,of
1445 * @opflundef of
1446 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA - decimal adjust AL after addition; invalid in 64-bit mode.
       OF is architecturally undefined; defers to iemCImpl_daa (touches rAX only). */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_daa);
}
1455
1456
1457/**
1458 * @opcode 0x28
1459 * @opgroup og_gen_arith_bin
1460 * @opflclass arithmetic
1461 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB r/m8, r8 (MR form); LOCK allowed on the memory variant.  Has
       native emitters for both AMD64 and ARM64 hosts. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_sub_u8, iemAImpl_sub_u8_locked, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
1467
1468
1469/**
1470 * @opcode 0x29
1471 * @opgroup og_gen_arith_bin
1472 * @opflclass arithmetic
1473 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB r/m16/32/64, r16/32/64 (MR form). */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1480
1481
1482/**
1483 * @opcode 0x2a
1484 * @opgroup og_gen_arith_bin
1485 * @opflclass arithmetic
1486 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB r8, r/m8 (RM form) - register destination, no LOCK variant. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1492
1493
1494/**
1495 * @opcode 0x2b
1496 * @opgroup og_gen_arith_bin
1497 * @opflclass arithmetic
1498 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB r16/32/64, r/m16/32/64 (RM form) - register destination. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1, 0, sub, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1505
1506
1507/**
1508 * @opcode 0x2c
1509 * @opgroup og_gen_arith_bin
1510 * @opflclass arithmetic
1511 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL, imm8 - fixed AL destination. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1517
1518
1519/**
1520 * @opcode 0x2d
1521 * @opgroup og_gen_arith_bin
1522 * @opflclass arithmetic
1523 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX, Iz - last arg 1: SUB writes rAX. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1529
1530
1531/**
1532 * @opcode 0x2e
1533 * @opmnemonic SEG
1534 * @op1 CS
1535 * @opgroup og_prefix
1536 * @openc prefix
1537 * @opdisenum OP_SEG
1538 * @ophints harmless
1539 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record it and decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1549
1550
1551/**
1552 * @opcode 0x2f
1553 * @opfltest af,cf
1554 * @opflmodify cf,pf,af,zf,sf,of
1555 * @opflundef of
1556 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS - decimal adjust AL after subtraction; invalid in 64-bit mode.
       OF is architecturally undefined; defers to iemCImpl_das (touches rAX only). */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_das);
}
1565
1566
1567/**
1568 * @opcode 0x30
1569 * @opgroup og_gen_arith_bin
1570 * @opflclass logical
1571 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR r/m8, r8 (MR form); AF is architecturally undefined for XOR. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_xor_u8, iemAImpl_xor_u8_locked, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
1578
1579
1580/**
1581 * @opcode 0x31
1582 * @opgroup og_gen_arith_bin
1583 * @opflclass logical
1584 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR r/m16/32/64, r16/32/64 (MR form); AF is architecturally undefined for XOR. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1592
1593
1594/**
1595 * @opcode 0x32
1596 * @opgroup og_gen_arith_bin
1597 * @opflclass logical
1598 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR r8, r/m8 (RM form); AF is architecturally undefined for XOR. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /** @todo xor al,al optimization */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1606
1607
1608/**
1609 * @opcode 0x33
1610 * @opgroup og_gen_arith_bin
1611 * @opflclass logical
1612 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR r16/32/64, r/m16/32/64 (RM form); AF is architecturally undefined for XOR. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Deal with special case of 'xor rN, rN' which sets rN to zero and has a known EFLAGS outcome.
     */
    /* The first test holds iff mod == 3 (register form) and reg == rm; the
       second makes sure the REX.R/REX.B extensions also match, i.e. both
       operands really name the same register. */
    if (   (bRm >> X86_MODRM_REG_SHIFT) == ((bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT))
        && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB)
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* Zero the register, clear all status flags, then set PF+ZF. */
                IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
                IEM_MC_LOCAL(uint32_t, fEFlags);
                IEM_MC_FETCH_EFLAGS(fEFlags);
                IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS);
                IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF);
                IEM_MC_COMMIT_EFLAGS(fEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
                IEM_MC_LOCAL(uint32_t, fEFlags);
                IEM_MC_FETCH_EFLAGS(fEFlags);
                IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS);
                IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF);
                IEM_MC_COMMIT_EFLAGS(fEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
                IEM_MC_LOCAL(uint32_t, fEFlags);
                IEM_MC_FETCH_EFLAGS(fEFlags);
                IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS);
                IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF);
                IEM_MC_COMMIT_EFLAGS(fEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /* Generic path for all other operand combinations. */
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1, 0, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1673
1674
1675/**
1676 * @opcode 0x34
1677 * @opgroup og_gen_arith_bin
1678 * @opflclass logical
1679 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL, imm8 - fixed AL destination; AF is architecturally undefined for XOR. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1686
1687
1688/**
1689 * @opcode 0x35
1690 * @opgroup og_gen_arith_bin
1691 * @opflclass logical
1692 */
1693FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1694{
1695 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1696 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1697 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1698}
1699
1700
1701/**
1702 * @opcode 0x36
1703 * @opmnemonic SEG
1704 * @op1 SS
1705 * @opgroup og_prefix
1706 * @openc prefix
1707 * @opdisenum OP_SEG
1708 * @ophints harmless
1709 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record it and decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1719
1720
1721/**
1722 * @opcode 0x37
1723 * @opfltest af
1724 * @opflmodify cf,pf,af,zf,sf,of
1725 * @opflundef pf,zf,sf,of
1726 * @opgroup og_gen_arith_dec
1727 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1728 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1729 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1730 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1731 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1732 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1733 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1734 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1735 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1736 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1737 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1738 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1739 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1740 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1741 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1742 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1743 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1744 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1745 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1746 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1747 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1748 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1749 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1750 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1751 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1752 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1753 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1754 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1755 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1756 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1757 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1758 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA - ASCII adjust AL after addition; invalid in 64-bit mode.
       OF is architecturally undefined (Intel/AMD differ, see @optest above);
       defers to iemCImpl_aaa (touches rAX only). */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aaa);
}
1768
1769
1770/**
1771 * @opcode 0x38
1772 * @opflclass arithmetic
1773 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP r/m8, r8 - read-only destination (RO body), so no LOCK variant. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_r8_RO(bRm, iemAImpl_cmp_u8, cmp, 0);
}
1780
1781
1782/**
1783 * @opcode 0x39
1784 * @opflclass arithmetic
1785 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP r/m16/32/64, r16/32/64 - read-only; worker picked by effective operand size. */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rm_rv_RO(bRm, iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, cmp, 0);
}
1792
1793
1794/**
1795 * @opcode 0x3a
1796 * @opflclass arithmetic
1797 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP r8, r/m8 - reversed operand order; the body macro fetches ModRM itself. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8, cmp, 0);
}
1803
1804
1805/**
1806 * @opcode 0x3b
1807 * @opflclass arithmetic
1808 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP r16/32/64, r/m16/32/64 - reversed operand order, flags only. */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0, 0, cmp, 0);
}
1815
1816
1817/**
1818 * @opcode 0x3c
1819 * @opflclass arithmetic
1820 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL, imm8 - short-form compare against the accumulator. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1826
1827
1828/**
1829 * @opcode 0x3d
1830 * @opflclass arithmetic
1831 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX, imm16/32 - short-form compare; immediate sign-extended for 64-bit. */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1837
1838
1839/**
1840 * @opcode 0x3e
1841 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record it, then continue decoding with the
       next opcode byte.  Any REX prefix seen before this point is invalidated. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1851
1852
1853/**
1854 * @opcode 0x3f
1855 * @opfltest af
1856 * @opflmodify cf,pf,af,zf,sf,of
1857 * @opflundef pf,zf,sf,of
1858 * @opgroup og_gen_arith_dec
1859 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1860 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1861 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1862 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1863 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1864 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1865 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1866 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1867 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1868 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1869 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1870 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1871 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1872 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1873 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1874 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1875 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1876 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1877 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1878 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1879 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1880 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1881 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1882 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1883 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1884 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1885 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1886 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1887 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1888 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1889 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1890 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1891 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1892 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1893 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1894 */
1895FNIEMOP_DEF(iemOp_aas)
1896{
1897 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1898 IEMOP_HLP_NO_64BIT();
1899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1900 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1901
1902 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aas);
1903}
1904
1905
1906/**
1907 * Common 'inc/dec register' helper.
1908 *
1909 * Not for 64-bit code, only for what became the rex prefixes.
1910 */
1911#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
1912 switch (pVCpu->iem.s.enmEffOpSize) \
1913 { \
1914 case IEMMODE_16BIT: \
1915 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_64BIT, 0); \
1916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1917 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
1918 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1919 IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
1920 IEM_MC_REF_EFLAGS(pEFlags); \
1921 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
1922 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1923 IEM_MC_END(); \
1924 break; \
1925 \
1926 case IEMMODE_32BIT: \
1927 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0); \
1928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1929 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
1930 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1931 IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
1932 IEM_MC_REF_EFLAGS(pEFlags); \
1933 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
1934 IEM_MC_CLEAR_HIGH_GREG_U64(a_iReg); \
1935 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1936 IEM_MC_END(); \
1937 break; \
1938 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1939 } \
1940 (void)0
1941
1942/**
1943 * @opcode 0x40
1944 * @opflclass incdec
1945 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Bare REX (0x40): no W/R/X/B bits set; re-dispatch on the next byte. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1963
1964
1965/**
1966 * @opcode 0x41
1967 * @opflclass incdec
1968 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.B (0x41): extends ModRM.rm / SIB.base / opcode-reg with bit 3. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1987
1988
1989/**
1990 * @opcode 0x42
1991 * @opflclass incdec
1992 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.X (0x42): extends SIB.index with bit 3. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
2011
2012
2013
2014/**
2015 * @opcode 0x43
2016 * @opflclass incdec
2017 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.BX (0x43): REX.B + REX.X combined. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
2037
2038
2039/**
2040 * @opcode 0x44
2041 * @opflclass incdec
2042 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.R (0x44): extends ModRM.reg with bit 3. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
2061
2062
2063/**
2064 * @opcode 0x45
2065 * @opflclass incdec
2066 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RB (0x45): REX.R + REX.B combined. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
2086
2087
2088/**
2089 * @opcode 0x46
2090 * @opflclass incdec
2091 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RX (0x46): REX.R + REX.X combined. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
2111
2112
2113/**
2114 * @opcode 0x47
2115 * @opflclass incdec
2116 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RBX (0x47): REX.R + REX.B + REX.X combined. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
2137
2138
2139/**
2140 * @opcode 0x48
2141 * @opflclass incdec
2142 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.W (0x48): selects 64-bit operand size, so recalc effective op size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
}
2161
2162
2163/**
2164 * @opcode 0x49
2165 * @opflclass incdec
2166 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.BW (0x49): REX.B + REX.W; W changes op size, so recalc. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
}
2186
2187
2188/**
2189 * @opcode 0x4a
2190 * @opflclass incdec
2191 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.XW (0x4a): REX.X + REX.W; W changes op size, so recalc. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
}
2211
2212
2213/**
2214 * @opcode 0x4b
2215 * @opflclass incdec
2216 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.BXW (0x4b): REX.B + REX.X + REX.W; W changes op size, so recalc. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
}
2237
2238
2239/**
2240 * @opcode 0x4c
2241 * @opflclass incdec
2242 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RW (0x4c): REX.R + REX.W; W changes op size, so recalc. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
}
2262
2263
2264/**
2265 * @opcode 0x4d
2266 * @opflclass incdec
2267 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RBW (0x4d): REX.R + REX.B + REX.W; W changes op size, so recalc. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
}
2288
2289
2290/**
2291 * @opcode 0x4e
2292 * @opflclass incdec
2293 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RXW (0x4e): REX.R + REX.X + REX.W; W changes op size, so recalc. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
}
2314
2315
2316/**
2317 * @opcode 0x4f
2318 * @opflclass incdec
2319 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RBXW (0x4f): all four REX bits set; W changes op size, so recalc. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
}
2341
2342
2343/**
2344 * Common 'push register' helper.
2345 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    /* In 64-bit mode REX.B extends the register, and PUSH defaults to a
       64-bit operand; the 66h prefix selects 16-bit (no 32-bit push). */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* Fetch the register value and push it at the effective operand size. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2390
2391
2392/**
2393 * @opcode 0x50
2394 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    /* PUSH rAX (REX.B extension handled by the common worker). */
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
2400
2401
2402/**
2403 * @opcode 0x51
2404 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    /* PUSH rCX (REX.B extension handled by the common worker). */
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
2410
2411
2412/**
2413 * @opcode 0x52
2414 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    /* PUSH rDX (REX.B extension handled by the common worker). */
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
2420
2421
2422/**
2423 * @opcode 0x53
2424 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    /* PUSH rBX (REX.B extension handled by the common worker). */
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
2430
2431
2432/**
2433 * @opcode 0x54
2434 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) != IEMTARGETCPU_8086)
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);

    /* 8086 works differently wrt to 'push sp' compared to 80186 and later. */
    /* The 8086 stores the already-decremented SP value: subtract 2 from the
       fetched value before pushing it. */
    IEM_MC_BEGIN(0, 1, IEM_MC_F_ONLY_8086, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Value);
    IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
    IEM_MC_SUB_LOCAL_U16(u16Value, 2);
    IEM_MC_PUSH_U16(u16Value);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
2451
2452
2453/**
2454 * @opcode 0x55
2455 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    /* PUSH rBP (REX.B extension handled by the common worker). */
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
2461
2462
2463/**
2464 * @opcode 0x56
2465 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    /* PUSH rSI (REX.B extension handled by the common worker). */
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
2471
2472
2473/**
2474 * @opcode 0x57
2475 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    /* PUSH rDI (REX.B extension handled by the common worker). */
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
2481
2482
2483/**
2484 * Common 'pop register' helper.
2485 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    /* In 64-bit mode REX.B extends the register, and POP defaults to a
       64-bit operand; the 66h prefix selects 16-bit (no 32-bit pop). */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* Pop into the register at the effective operand size; the
       IEM_MC_POP_GREG_* statements handle the stack read + SP update. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U16(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U32(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U64(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2524
2525
2526/**
2527 * @opcode 0x58
2528 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    /* POP rAX (REX.B extension handled by the common worker). */
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
2534
2535
2536/**
2537 * @opcode 0x59
2538 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    /* POP rCX (REX.B extension handled by the common worker). */
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
2544
2545
2546/**
2547 * @opcode 0x5a
2548 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    /* POP rDX (REX.B extension handled by the common worker). */
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
2554
2555
2556/**
2557 * @opcode 0x5b
2558 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    /* POP rBX (REX.B extension handled by the common worker). */
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
2564
2565
2566/**
2567 * @opcode 0x5c
2568 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    /* POP rSP - the SP-specific ordering semantics are handled by the
       common worker's IEM_MC_POP_GREG_* statements. */
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
}
2574
2575
2576/**
2577 * @opcode 0x5d
2578 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    /* POP rBP (REX.B extension handled by the common worker). */
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
2584
2585
2586/**
2587 * @opcode 0x5e
2588 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    /* POP rSI (REX.B extension handled by the common worker). */
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
2594
2595
2596/**
2597 * @opcode 0x5f
2598 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    /* POP rDI (REX.B extension handled by the common worker). */
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
2604
2605
2606/**
2607 * @opcode 0x60
2608 */
FNIEMOP_DEF(iemOp_pusha)
{
    /* PUSHA/PUSHAD - push all eight GPRs; min 80186, invalid in 64-bit mode.
       Deferred to C implementations; only rSP is marked as dirtied since the
       other registers are read, not written. */
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_32);
}
2619
2620
2621/**
2622 * @opcode 0x61
2623 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    /* 0x61 is POPA/POPAD outside 64-bit mode; in 64-bit mode it is the MVEX
       prefix (KNC), which is not supported and raises #UD. */
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        /* POPA writes all eight GPRs, so the mask flags every one of them. */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                          RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                        iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                      RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                    iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
2658
2659
2660/**
2661 * @opcode 0x62
2662 * @opmnemonic bound
2663 * @op1 Gv_RO
2664 * @op2 Ma
2665 * @opmincpu 80186
2666 * @ophints harmless x86_invalid_64
2667 * @optest op1=0 op2=0 ->
2668 * @optest op1=1 op2=0 -> value.xcpt=5
2669 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2670 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2671 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2672 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2673 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2674 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2675 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2676 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2677 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2678 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2679 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2680 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2681 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2682 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2683 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2684 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2685 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2686 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2687 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2688 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2689 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2690 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2691 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2692 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2693 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2694 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2695 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2696 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2697 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2698 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2699 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2700 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2701 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2702 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2703 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2704 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2705 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2706 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2707 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2708 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2709 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2710 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2711 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restrict wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /* MOD != 3: this is BOUND.  The memory operand holds the lower
               and upper bound at offsets 0 and 2 (o16) / 0 and 4 (o32);
               the range check itself is done by iemCImpl_bound_16/32. */
            /** @todo testcase: check that there are two memory accesses involved. Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        /* MOD == 3 outside 64-bit mode: EVEX prefix candidate; #UD if the
           guest CPU lacks AVX-512 foundation support. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX decoding is not implemented yet: consume the two remaining
       payload bytes and bail with a not-implemented status. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2799
2800
2801/**
2802 * @opcode 0x63
2803 * @opflmodify zf
2804 * @note non-64-bit modes.
2805 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    /* ARPL - adjust RPL of r/m16 selector; min 286, protected mode only
       (this opcode is MOVSXD in 64-bit mode, dispatched elsewhere).
       The RPL adjustment and ZF update are done by iemAImpl_arpl. */
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register */
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory */
        /* Destination is mapped read-write since ARPL may update the
           selector's RPL bits in place. */
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2853
2854
2855/**
2856 * @opcode 0x63
2857 *
2858 * @note This is a weird one. It works like a regular move instruction if
2859 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2860 * @todo This definitely needs a testcase to verify the odd cases. */
2861FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
2862{
2863 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */
2864
2865 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
2866 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2867
2868 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2869 {
2870 if (IEM_IS_MODRM_REG_MODE(bRm))
2871 {
2872 /*
2873 * Register to register.
2874 */
2875 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2876 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2877 IEM_MC_LOCAL(uint64_t, u64Value);
2878 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
2879 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2880 IEM_MC_ADVANCE_RIP_AND_FINISH();
2881 IEM_MC_END();
2882 }
2883 else
2884 {
2885 /*
2886 * We're loading a register from memory.
2887 */
2888 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
2889 IEM_MC_LOCAL(uint64_t, u64Value);
2890 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2891 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2893 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2894 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2895 IEM_MC_ADVANCE_RIP_AND_FINISH();
2896 IEM_MC_END();
2897 }
2898 }
2899 else
2900 AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
2901}
2902
2903
2904/**
2905 * @opcode 0x64
2906 * @opmnemonic segfs
2907 * @opmincpu 80386
2908 * @opgroup og_prefixes
2909 */
2910FNIEMOP_DEF(iemOp_seg_FS)
2911{
2912 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
2913 IEMOP_HLP_MIN_386();
2914
2915 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
2916 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
2917
2918 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2919 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2920}
2921
2922
2923/**
2924 * @opcode 0x65
2925 * @opmnemonic seggs
2926 * @opmincpu 80386
2927 * @opgroup og_prefixes
2928 */
2929FNIEMOP_DEF(iemOp_seg_GS)
2930{
2931 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
2932 IEMOP_HLP_MIN_386();
2933
2934 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
2935 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
2936
2937 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2938 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2939}
2940
2941
2942/**
2943 * @opcode 0x66
2944 * @opmnemonic opsize
2945 * @openc prefix
2946 * @opmincpu 80386
2947 * @ophints harmless
2948 * @opgroup og_prefixes
2949 */
2950FNIEMOP_DEF(iemOp_op_size)
2951{
2952 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
2953 IEMOP_HLP_MIN_386();
2954
2955 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
2956 iemRecalEffOpSize(pVCpu);
2957
2958 /* For the 4 entry opcode tables, the operand prefix doesn't not count
2959 when REPZ or REPNZ are present. */
2960 if (pVCpu->iem.s.idxPrefix == 0)
2961 pVCpu->iem.s.idxPrefix = 1;
2962
2963 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2964 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2965}
2966
2967
2968/**
2969 * @opcode 0x67
2970 * @opmnemonic addrsize
2971 * @openc prefix
2972 * @opmincpu 80386
2973 * @ophints harmless
2974 * @opgroup og_prefixes
2975 */
2976FNIEMOP_DEF(iemOp_addr_size)
2977{
2978 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
2979 IEMOP_HLP_MIN_386();
2980
2981 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
2982 switch (pVCpu->iem.s.enmDefAddrMode)
2983 {
2984 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2985 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
2986 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2987 default: AssertFailed();
2988 }
2989
2990 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2991 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2992}
2993
2994
2995/**
2996 * @opcode 0x68
2997 */
2998FNIEMOP_DEF(iemOp_push_Iz)
2999{
3000 IEMOP_MNEMONIC(push_Iz, "push Iz");
3001 IEMOP_HLP_MIN_186();
3002 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3003 switch (pVCpu->iem.s.enmEffOpSize)
3004 {
3005 case IEMMODE_16BIT:
3006 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
3007 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3009 IEM_MC_LOCAL_CONST(uint16_t, u16Value, u16Imm);
3010 IEM_MC_PUSH_U16(u16Value);
3011 IEM_MC_ADVANCE_RIP_AND_FINISH();
3012 IEM_MC_END();
3013 break;
3014
3015 case IEMMODE_32BIT:
3016 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
3017 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3019 IEM_MC_LOCAL_CONST(uint32_t, u32Value, u32Imm);
3020 IEM_MC_PUSH_U32(u32Value);
3021 IEM_MC_ADVANCE_RIP_AND_FINISH();
3022 IEM_MC_END();
3023 break;
3024
3025 case IEMMODE_64BIT:
3026 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
3027 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3029 IEM_MC_LOCAL_CONST(uint64_t, u64Value, u64Imm);
3030 IEM_MC_PUSH_U64(u64Value);
3031 IEM_MC_ADVANCE_RIP_AND_FINISH();
3032 IEM_MC_END();
3033 break;
3034
3035 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3036 }
3037}
3038
3039
3040/**
3041 * @opcode 0x69
3042 * @opflclass multiply
3043 */
3044FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
3045{
3046 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
3047 IEMOP_HLP_MIN_186();
3048 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3049 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3050
3051 switch (pVCpu->iem.s.enmEffOpSize)
3052 {
3053 case IEMMODE_16BIT:
3054 {
3055 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
3056 if (IEM_IS_MODRM_REG_MODE(bRm))
3057 {
3058 /* register operand */
3059 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3060 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
3061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3062 IEM_MC_LOCAL(uint16_t, u16Tmp);
3063 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3064 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3065 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
3066 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3067 IEM_MC_REF_EFLAGS(pEFlags);
3068 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3069 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3070
3071 IEM_MC_ADVANCE_RIP_AND_FINISH();
3072 IEM_MC_END();
3073 }
3074 else
3075 {
3076 /* memory operand */
3077 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
3078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
3080
3081 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3083
3084 IEM_MC_LOCAL(uint16_t, u16Tmp);
3085 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3086
3087 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3088 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
3089 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3090 IEM_MC_REF_EFLAGS(pEFlags);
3091 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3092 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3093
3094 IEM_MC_ADVANCE_RIP_AND_FINISH();
3095 IEM_MC_END();
3096 }
3097 break;
3098 }
3099
3100 case IEMMODE_32BIT:
3101 {
3102 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
3103 if (IEM_IS_MODRM_REG_MODE(bRm))
3104 {
3105 /* register operand */
3106 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3107 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
3108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3109 IEM_MC_LOCAL(uint32_t, u32Tmp);
3110 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3111
3112 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3113 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
3114 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3115 IEM_MC_REF_EFLAGS(pEFlags);
3116 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3117 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3118
3119 IEM_MC_ADVANCE_RIP_AND_FINISH();
3120 IEM_MC_END();
3121 }
3122 else
3123 {
3124 /* memory operand */
3125 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
3126 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3127 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3128
3129 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3131
3132 IEM_MC_LOCAL(uint32_t, u32Tmp);
3133 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3134
3135 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3136 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
3137 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3138 IEM_MC_REF_EFLAGS(pEFlags);
3139 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3140 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3141
3142 IEM_MC_ADVANCE_RIP_AND_FINISH();
3143 IEM_MC_END();
3144 }
3145 break;
3146 }
3147
3148 case IEMMODE_64BIT:
3149 {
3150 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3151 if (IEM_IS_MODRM_REG_MODE(bRm))
3152 {
3153 /* register operand */
3154 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3155 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
3156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3157 IEM_MC_LOCAL(uint64_t, u64Tmp);
3158 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3159
3160 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3161 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
3162 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3163 IEM_MC_REF_EFLAGS(pEFlags);
3164 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3165 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3166
3167 IEM_MC_ADVANCE_RIP_AND_FINISH();
3168 IEM_MC_END();
3169 }
3170 else
3171 {
3172 /* memory operand */
3173 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3174 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3176
3177 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
3178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* parameter count for the threaded function for this block. */
3179
3180 IEM_MC_LOCAL(uint64_t, u64Tmp);
3181 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3182
3183 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3184 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int32_t)u32Imm, 1);
3185 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3186 IEM_MC_REF_EFLAGS(pEFlags);
3187 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3188 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3189
3190 IEM_MC_ADVANCE_RIP_AND_FINISH();
3191 IEM_MC_END();
3192 }
3193 break;
3194 }
3195
3196 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3197 }
3198}
3199
3200
3201/**
3202 * @opcode 0x6a
3203 */
3204FNIEMOP_DEF(iemOp_push_Ib)
3205{
3206 IEMOP_MNEMONIC(push_Ib, "push Ib");
3207 IEMOP_HLP_MIN_186();
3208 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3209 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3210
3211 switch (pVCpu->iem.s.enmEffOpSize)
3212 {
3213 case IEMMODE_16BIT:
3214 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_186, 0);
3215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3216 IEM_MC_LOCAL_CONST(uint16_t, uValue, (int16_t)i8Imm);
3217 IEM_MC_PUSH_U16(uValue);
3218 IEM_MC_ADVANCE_RIP_AND_FINISH();
3219 IEM_MC_END();
3220 break;
3221 case IEMMODE_32BIT:
3222 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
3223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3224 IEM_MC_LOCAL_CONST(uint32_t, uValue, (int32_t)i8Imm);
3225 IEM_MC_PUSH_U32(uValue);
3226 IEM_MC_ADVANCE_RIP_AND_FINISH();
3227 IEM_MC_END();
3228 break;
3229 case IEMMODE_64BIT:
3230 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
3231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3232 IEM_MC_LOCAL_CONST(uint64_t, uValue, (int64_t)i8Imm);
3233 IEM_MC_PUSH_U64(uValue);
3234 IEM_MC_ADVANCE_RIP_AND_FINISH();
3235 IEM_MC_END();
3236 break;
3237 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3238 }
3239}
3240
3241
3242/**
3243 * @opcode 0x6b
3244 * @opflclass multiply
3245 */
3246FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
3247{
3248 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
3249 IEMOP_HLP_MIN_186();
3250 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3251 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3252
3253 switch (pVCpu->iem.s.enmEffOpSize)
3254 {
3255 case IEMMODE_16BIT:
3256 {
3257 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
3258 if (IEM_IS_MODRM_REG_MODE(bRm))
3259 {
3260 /* register operand */
3261 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
3262 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3264
3265 IEM_MC_LOCAL(uint16_t, u16Tmp);
3266 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3267
3268 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3269 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
3270 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3271 IEM_MC_REF_EFLAGS(pEFlags);
3272 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3273 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3274
3275 IEM_MC_ADVANCE_RIP_AND_FINISH();
3276 IEM_MC_END();
3277 }
3278 else
3279 {
3280 /* memory operand */
3281 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
3282
3283 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3284 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3285
3286 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
3287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3288
3289 IEM_MC_LOCAL(uint16_t, u16Tmp);
3290 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3291
3292 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3293 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
3294 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3295 IEM_MC_REF_EFLAGS(pEFlags);
3296 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3297 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3298
3299 IEM_MC_ADVANCE_RIP_AND_FINISH();
3300 IEM_MC_END();
3301 }
3302 break;
3303 }
3304
3305 case IEMMODE_32BIT:
3306 {
3307 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
3308 if (IEM_IS_MODRM_REG_MODE(bRm))
3309 {
3310 /* register operand */
3311 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3312 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
3313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3314 IEM_MC_LOCAL(uint32_t, u32Tmp);
3315 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3316
3317 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3318 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
3319 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3320 IEM_MC_REF_EFLAGS(pEFlags);
3321 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3322 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3323
3324 IEM_MC_ADVANCE_RIP_AND_FINISH();
3325 IEM_MC_END();
3326 }
3327 else
3328 {
3329 /* memory operand */
3330 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
3331 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3332 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3333
3334 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
3335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3336
3337 IEM_MC_LOCAL(uint32_t, u32Tmp);
3338 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3339
3340 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3341 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
3342 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3343 IEM_MC_REF_EFLAGS(pEFlags);
3344 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3345 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3346
3347 IEM_MC_ADVANCE_RIP_AND_FINISH();
3348 IEM_MC_END();
3349 }
3350 break;
3351 }
3352
3353 case IEMMODE_64BIT:
3354 {
3355 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3356 if (IEM_IS_MODRM_REG_MODE(bRm))
3357 {
3358 /* register operand */
3359 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3360 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
3361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3362 IEM_MC_LOCAL(uint64_t, u64Tmp);
3363 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3364
3365 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3366 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
3367 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3368 IEM_MC_REF_EFLAGS(pEFlags);
3369 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3370 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3371
3372 IEM_MC_ADVANCE_RIP_AND_FINISH();
3373 IEM_MC_END();
3374 }
3375 else
3376 {
3377 /* memory operand */
3378 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3379 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3380 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3381
3382 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the threaded parameter count. */
3383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3384
3385 IEM_MC_LOCAL(uint64_t, u64Tmp);
3386 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3387
3388 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3389 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
3390 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3391 IEM_MC_REF_EFLAGS(pEFlags);
3392 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3393 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3394
3395 IEM_MC_ADVANCE_RIP_AND_FINISH();
3396 IEM_MC_END();
3397 }
3398 break;
3399 }
3400
3401 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3402 }
3403}
3404
3405
3406/**
3407 * @opcode 0x6c
3408 * @opfltest iopl,df
3409 */
3410FNIEMOP_DEF(iemOp_insb_Yb_DX)
3411{
3412 IEMOP_HLP_MIN_186();
3413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3414 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3415 {
3416 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
3417 switch (pVCpu->iem.s.enmEffAddrMode)
3418 {
3419 case IEMMODE_16BIT:
3420 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3421 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3422 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3423 iemCImpl_rep_ins_op8_addr16, false);
3424 case IEMMODE_32BIT:
3425 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3426 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3427 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3428 iemCImpl_rep_ins_op8_addr32, false);
3429 case IEMMODE_64BIT:
3430 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3431 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3432 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3433 iemCImpl_rep_ins_op8_addr64, false);
3434 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3435 }
3436 }
3437 else
3438 {
3439 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
3440 switch (pVCpu->iem.s.enmEffAddrMode)
3441 {
3442 case IEMMODE_16BIT:
3443 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3444 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3445 iemCImpl_ins_op8_addr16, false);
3446 case IEMMODE_32BIT:
3447 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3448 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3449 iemCImpl_ins_op8_addr32, false);
3450 case IEMMODE_64BIT:
3451 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3452 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3453 iemCImpl_ins_op8_addr64, false);
3454 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3455 }
3456 }
3457}
3458
3459
3460/**
3461 * @opcode 0x6d
3462 * @opfltest iopl,df
3463 */
3464FNIEMOP_DEF(iemOp_inswd_Yv_DX)
3465{
3466 IEMOP_HLP_MIN_186();
3467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3468 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3469 {
3470 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
3471 switch (pVCpu->iem.s.enmEffOpSize)
3472 {
3473 case IEMMODE_16BIT:
3474 switch (pVCpu->iem.s.enmEffAddrMode)
3475 {
3476 case IEMMODE_16BIT:
3477 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3478 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3479 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3480 iemCImpl_rep_ins_op16_addr16, false);
3481 case IEMMODE_32BIT:
3482 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3483 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3484 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3485 iemCImpl_rep_ins_op16_addr32, false);
3486 case IEMMODE_64BIT:
3487 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3488 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3489 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3490 iemCImpl_rep_ins_op16_addr64, false);
3491 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3492 }
3493 break;
3494 case IEMMODE_64BIT:
3495 case IEMMODE_32BIT:
3496 switch (pVCpu->iem.s.enmEffAddrMode)
3497 {
3498 case IEMMODE_16BIT:
3499 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3500 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3501 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3502 iemCImpl_rep_ins_op32_addr16, false);
3503 case IEMMODE_32BIT:
3504 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3505 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3506 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3507 iemCImpl_rep_ins_op32_addr32, false);
3508 case IEMMODE_64BIT:
3509 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3510 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3511 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3512 iemCImpl_rep_ins_op32_addr64, false);
3513 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3514 }
3515 break;
3516 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3517 }
3518 }
3519 else
3520 {
3521 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
3522 switch (pVCpu->iem.s.enmEffOpSize)
3523 {
3524 case IEMMODE_16BIT:
3525 switch (pVCpu->iem.s.enmEffAddrMode)
3526 {
3527 case IEMMODE_16BIT:
3528 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3529 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3530 iemCImpl_ins_op16_addr16, false);
3531 case IEMMODE_32BIT:
3532 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3533 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3534 iemCImpl_ins_op16_addr32, false);
3535 case IEMMODE_64BIT:
3536 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3537 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3538 iemCImpl_ins_op16_addr64, false);
3539 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3540 }
3541 break;
3542 case IEMMODE_64BIT:
3543 case IEMMODE_32BIT:
3544 switch (pVCpu->iem.s.enmEffAddrMode)
3545 {
3546 case IEMMODE_16BIT:
3547 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3548 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3549 iemCImpl_ins_op32_addr16, false);
3550 case IEMMODE_32BIT:
3551 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3552 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3553 iemCImpl_ins_op32_addr32, false);
3554 case IEMMODE_64BIT:
3555 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3556 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3557 iemCImpl_ins_op32_addr64, false);
3558 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3559 }
3560 break;
3561 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3562 }
3563 }
3564}
3565
3566
3567/**
3568 * @opcode 0x6e
3569 * @opfltest iopl,df
3570 */
3571FNIEMOP_DEF(iemOp_outsb_Yb_DX)
3572{
3573 IEMOP_HLP_MIN_186();
3574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3575 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3576 {
3577 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
3578 switch (pVCpu->iem.s.enmEffAddrMode)
3579 {
3580 case IEMMODE_16BIT:
3581 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3582 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3583 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3584 iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3585 case IEMMODE_32BIT:
3586 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3587 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3588 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3589 iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3590 case IEMMODE_64BIT:
3591 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3592 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3593 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3594 iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3595 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3596 }
3597 }
3598 else
3599 {
3600 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
3601 switch (pVCpu->iem.s.enmEffAddrMode)
3602 {
3603 case IEMMODE_16BIT:
3604 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3605 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3606 iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3607 case IEMMODE_32BIT:
3608 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3609 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3610 iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3611 case IEMMODE_64BIT:
3612 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3613 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3614 iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3615 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3616 }
3617 }
3618}
3619
3620
3621/**
3622 * @opcode 0x6f
3623 * @opfltest iopl,df
3624 */
3625FNIEMOP_DEF(iemOp_outswd_Yv_DX)
3626{
3627 IEMOP_HLP_MIN_186();
3628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3629 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3630 {
3631 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
3632 switch (pVCpu->iem.s.enmEffOpSize)
3633 {
3634 case IEMMODE_16BIT:
3635 switch (pVCpu->iem.s.enmEffAddrMode)
3636 {
3637 case IEMMODE_16BIT:
3638 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3639 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3640 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3641 iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3642 case IEMMODE_32BIT:
3643 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3644 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3645 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3646 iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3647 case IEMMODE_64BIT:
3648 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3649 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3650 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3651 iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3652 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3653 }
3654 break;
3655 case IEMMODE_64BIT:
3656 case IEMMODE_32BIT:
3657 switch (pVCpu->iem.s.enmEffAddrMode)
3658 {
3659 case IEMMODE_16BIT:
3660 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3661 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3662 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3663 iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3664 case IEMMODE_32BIT:
3665 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3666 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3667 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3668 iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3669 case IEMMODE_64BIT:
3670 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3671 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3672 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3673 iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3674 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3675 }
3676 break;
3677 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3678 }
3679 }
3680 else
3681 {
3682 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
3683 switch (pVCpu->iem.s.enmEffOpSize)
3684 {
3685 case IEMMODE_16BIT:
3686 switch (pVCpu->iem.s.enmEffAddrMode)
3687 {
3688 case IEMMODE_16BIT:
3689 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3690 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3691 iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3692 case IEMMODE_32BIT:
3693 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3694 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3695 iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3696 case IEMMODE_64BIT:
3697 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3698 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3699 iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3700 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3701 }
3702 break;
3703 case IEMMODE_64BIT:
3704 case IEMMODE_32BIT:
3705 switch (pVCpu->iem.s.enmEffAddrMode)
3706 {
3707 case IEMMODE_16BIT:
3708 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3709 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3710 iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3711 case IEMMODE_32BIT:
3712 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3713 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3714 iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3715 case IEMMODE_64BIT:
3716 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3717 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3718 iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3719 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3720 }
3721 break;
3722 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3723 }
3724 }
3725}
3726
3727
3728/**
3729 * @opcode 0x70
3730 * @opfltest of
3731 */
3732FNIEMOP_DEF(iemOp_jo_Jb)
3733{
3734 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
3735 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3736 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3737
3738 IEM_MC_BEGIN(0, 0, 0, 0);
3739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3740 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3741 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3742 } IEM_MC_ELSE() {
3743 IEM_MC_ADVANCE_RIP_AND_FINISH();
3744 } IEM_MC_ENDIF();
3745 IEM_MC_END();
3746}
3747
3748
3749/**
3750 * @opcode 0x71
3751 * @opfltest of
3752 */
3753FNIEMOP_DEF(iemOp_jno_Jb)
3754{
3755 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
3756 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3757 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3758
3759 IEM_MC_BEGIN(0, 0, 0, 0);
3760 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3761 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3762 IEM_MC_ADVANCE_RIP_AND_FINISH();
3763 } IEM_MC_ELSE() {
3764 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3765 } IEM_MC_ENDIF();
3766 IEM_MC_END();
3767}
3768
/**
 * Jump short if carry / not above-or-equal: takes the rel8 branch when CF=1.
 *
 * @opcode 0x72
 * @opfltest cf
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3788
3789
/**
 * Jump short if not carry / not below: takes the rel8 branch when CF=0
 * (CF-set test with swapped branch bodies).
 *
 * @opcode 0x73
 * @opfltest cf
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3809
3810
/**
 * Jump short if equal / zero: takes the rel8 branch when ZF=1.
 *
 * @opcode 0x74
 * @opfltest zf
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3830
3831
/**
 * Jump short if not equal / not zero: takes the rel8 branch when ZF=0
 * (ZF-set test with swapped branch bodies).
 *
 * @opcode 0x75
 * @opfltest zf
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3851
3852
/**
 * Jump short if below-or-equal / not above: takes the rel8 branch when
 * CF=1 or ZF=1.
 *
 * @opcode 0x76
 * @opfltest cf,zf
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3872
3873
/**
 * Jump short if above / not below-or-equal: takes the rel8 branch when
 * CF=0 and ZF=0 (any-bits-set test with swapped branch bodies).
 *
 * @opcode 0x77
 * @opfltest cf,zf
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3893
3894
/**
 * Jump short if sign: takes the rel8 branch when SF=1.
 *
 * @opcode 0x78
 * @opfltest sf
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3914
3915
/**
 * Jump short if not sign: takes the rel8 branch when SF=0 (SF-set test
 * with swapped branch bodies).
 *
 * @opcode 0x79
 * @opfltest sf
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3935
3936
/**
 * Jump short if parity (even): takes the rel8 branch when PF=1.
 *
 * @opcode 0x7a
 * @opfltest pf
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3956
3957
/**
 * Jump short if not parity (odd): takes the rel8 branch when PF=0
 * (PF-set test with swapped branch bodies).
 *
 * @opcode 0x7b
 * @opfltest pf
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3977
3978
/**
 * Jump short if less / not greater-or-equal (signed): takes the rel8 branch
 * when SF != OF.
 *
 * @opcode 0x7c
 * @opfltest sf,of
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3998
3999
/**
 * Jump short if not less / greater-or-equal (signed): takes the rel8 branch
 * when SF == OF (SF!=OF test with swapped branch bodies).
 *
 * @opcode 0x7d
 * @opfltest sf,of
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4019
4020
/**
 * Jump short if less-or-equal / not greater (signed): takes the rel8 branch
 * when ZF=1 or SF != OF.
 *
 * @opcode 0x7e
 * @opfltest zf,sf,of
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4040
4041
/**
 * Jump short if greater / not less-or-equal (signed): takes the rel8 branch
 * when ZF=0 and SF == OF (combined test with swapped branch bodies).
 *
 * @opcode 0x7f
 * @opfltest zf,sf,of
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4061
4062
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Eb_Ib_80.
 *
 * Emits the register-direct path and the non-LOCKed memory path and then
 * leaves an 'else' scope open; the instruction handler must follow this with
 * IEMOP_BODY_BINARY_Eb_Ib_LOCKED (or _NO_LOCK) to supply the LOCK-prefixed
 * path and close the scopes.  The split into two macros is required by the
 * IEMAllInstPython.py parser.
 *
 * Note the decode order in the memory path: effective address first, then the
 * trailing immediate byte, matching the instruction encoding.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm, 1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
4113
/**
 * Completion of IEMOP_BODY_BINARY_Eb_Ib_RW: emits the LOCK-prefixed memory
 * path using an atomic mapping and closes the scopes the _RW macro left open.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
4137
/**
 * Read-only variant of IEMOP_BODY_BINARY_Eb_Ib_RW for CMP: the destination is
 * mapped read-only and only EFLAGS are updated.  Must be followed by
 * IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK to reject the LOCK prefix and close the
 * scopes left open here.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm, 1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,             0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
4184
/**
 * Completion macro that raises \#UD for a LOCK prefix and closes the scopes
 * opened by IEMOP_BODY_BINARY_Eb_Ib_RO (used by CMP, which is not lockable).
 */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4191
4192
4193
/**
 * Group 1 /0: 'add Eb,Ib' - byte add with immediate; the RW and LOCKED body
 * macros together form a single if/else statement.
 *
 * @opmaps grp1_80,grp1_83
 * @opcode /0
 * @opflclass arithmetic
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_add_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
}
4205
4206
/**
 * Group 1 /1: 'or Eb,Ib' - byte bitwise OR with immediate.
 *
 * @opmaps grp1_80,grp1_83
 * @opcode /1
 * @opflclass logical
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_or_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
}
4218
4219
/**
 * Group 1 /2: 'adc Eb,Ib' - byte add-with-carry with immediate.
 *
 * @opmaps grp1_80,grp1_83
 * @opcode /2
 * @opflclass arithmetic_carry
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_adc_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
}
4231
4232
/**
 * Group 1 /3: 'sbb Eb,Ib' - byte subtract-with-borrow with immediate.
 *
 * @opmaps grp1_80,grp1_83
 * @opcode /3
 * @opflclass arithmetic_carry
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_sbb_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
}
4244
4245
/**
 * Group 1 /4: 'and Eb,Ib' - byte bitwise AND with immediate.
 *
 * @opmaps grp1_80,grp1_83
 * @opcode /4
 * @opflclass logical
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_and_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
}
4257
4258
/**
 * Group 1 /5: 'sub Eb,Ib' - byte subtract with immediate.
 *
 * @opmaps grp1_80,grp1_83
 * @opcode /5
 * @opflclass arithmetic
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
}
4270
4271
/**
 * Group 1 /6: 'xor Eb,Ib' - byte bitwise XOR with immediate.
 *
 * @opmaps grp1_80,grp1_83
 * @opcode /6
 * @opflclass logical
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
}
4283
4284
/**
 * Group 1 /7: 'cmp Eb,Ib' - byte compare with immediate; only EFLAGS are
 * written, so the read-only body is used and LOCK is rejected.
 *
 * @opmaps grp1_80,grp1_83
 * @opcode /7
 * @opflclass arithmetic
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
}
4296
4297
/**
 * Group 1 dispatcher for opcode 0x80: the reg field of the ModR/M byte
 * selects which byte-immediate operation to perform.
 *
 * @opcode 0x80
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4317
4318
/**
 * Body for a group 1 binary operator.
 *
 * Handles 'op Ev,Iz' for all three effective operand sizes; the 64-bit forms
 * take a sign-extended 32-bit immediate.  Emits the register-direct path and
 * the non-LOCKed memory path and leaves an 'else' scope open which
 * IEMOP_BODY_BINARY_Ev_Iz_LOCKED must close with the LOCK-prefixed path
 * (split into two macros for the IEMAllInstPython.py parser).
 */
#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); /* 32-bit writes zero bits 63:32 */ \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,        0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src, u16Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,        0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src, u32Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,        0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src, u64Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* This must be a separate macro due to parsing restrictions in IEMAllInstPython.py. */
/**
 * Completion of IEMOP_BODY_BINARY_Ev_Iz_RW: emits the LOCK-prefixed memory
 * path (atomic mappings) for all three operand sizes and closes the scopes
 * the _RW macro left open.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,        0); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src, u16Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,        0); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src, u32Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,        0); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src, u64Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4559
/* read-only version */
/**
 * Read-only variant of IEMOP_BODY_BINARY_Ev_Iz_RW for CMP: maps the memory
 * destination read-only, never writes back the operand, and (unlike the RW
 * macro) is self-contained - it raises \#UD on a LOCK prefix itself, so no
 * completion macro is needed.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst,       0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src, u16Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst,       0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src, u32Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst,       0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src, u64Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4716
4717
/**
 * Group 1 /0: 'add Ev,Iz' - word/dword/qword add with immediate (sign-extended
 * imm32 for the 64-bit form).
 *
 * @opmaps grp1_81
 * @opcode /0
 * @opflclass arithmetic
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
4729
4730
/**
 * Group 1 /1: 'or Ev,Iz' - word/dword/qword bitwise OR with immediate.
 *
 * @opmaps grp1_81
 * @opcode /1
 * @opflclass logical
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
4742
4743
/**
 * Group 1 /2: 'adc Ev,Iz' - word/dword/qword add-with-carry with immediate.
 *
 * @opmaps grp1_81
 * @opcode /2
 * @opflclass arithmetic_carry
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
4755
4756
/**
 * Group 1 /3: 'sbb Ev,Iz' - word/dword/qword subtract-with-borrow with
 * immediate.
 *
 * @opmaps grp1_81
 * @opcode /3
 * @opflclass arithmetic_carry
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
4768
4769
/**
 * Group 1 /4: 'and Ev,Iz' - word/dword/qword bitwise AND with immediate.
 *
 * @opmaps grp1_81
 * @opcode /4
 * @opflclass logical
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
4781
4782
/**
 * Group 1 /5: 'sub Ev,Iz' - word/dword/qword subtract with immediate.
 *
 * @opmaps grp1_81
 * @opcode /5
 * @opflclass arithmetic
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
4794
4795
/**
 * Group 1 /6: 'xor Ev,Iz' - word/dword/qword bitwise XOR with immediate.
 *
 * @opmaps grp1_81
 * @opcode /6
 * @opflclass logical
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
4807
4808
4809/**
4810 * @opmaps grp1_81
4811 * @opcode /7
4812 * @opflclass arithmetic
4813 */
4814FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
4815{
4816 IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
4817 IEMOP_BODY_BINARY_Ev_Iz_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
4818}
4819
4820
4821/**
4822 * @opcode 0x81
4823 */
4824FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
4825{
4826 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4827 switch (IEM_GET_MODRM_REG_8(bRm))
4828 {
4829 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
4830 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz, bRm);
4831 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
4832 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
4833 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
4834 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
4835 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
4836 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
4837 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4838 }
4839}
4840
4841
4842/**
4843 * @opcode 0x82
4844 * @opmnemonic grp1_82
4845 * @opgroup og_groups
4846 */
4847FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
4848{
4849 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
4850 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
4851}
4852
4853
4854/**
4855 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4856 * iemOp_Grp1_Ev_Ib.
4857 */
4858#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4859 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4860 { \
4861 /* \
4862 * Register target \
4863 */ \
4864 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4865 switch (pVCpu->iem.s.enmEffOpSize) \
4866 { \
4867 case IEMMODE_16BIT: \
4868 IEM_MC_BEGIN(3, 0, 0, 0); \
4869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4870 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4871 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
4872 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4873 \
4874 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4875 IEM_MC_REF_EFLAGS(pEFlags); \
4876 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4877 \
4878 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4879 IEM_MC_END(); \
4880 break; \
4881 \
4882 case IEMMODE_32BIT: \
4883 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4885 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4886 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
4887 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4888 \
4889 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4890 IEM_MC_REF_EFLAGS(pEFlags); \
4891 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4892 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4893 \
4894 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4895 IEM_MC_END(); \
4896 break; \
4897 \
4898 case IEMMODE_64BIT: \
4899 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4901 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4902 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
4903 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4904 \
4905 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4906 IEM_MC_REF_EFLAGS(pEFlags); \
4907 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4908 \
4909 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4910 IEM_MC_END(); \
4911 break; \
4912 \
4913 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4914 } \
4915 } \
4916 else \
4917 { \
4918 /* \
4919 * Memory target. \
4920 */ \
4921 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4922 { \
4923 switch (pVCpu->iem.s.enmEffOpSize) \
4924 { \
4925 case IEMMODE_16BIT: \
4926 IEM_MC_BEGIN(3, 3, 0, 0); \
4927 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4928 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4929 \
4930 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4931 IEMOP_HLP_DONE_DECODING(); \
4932 \
4933 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4934 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4935 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4936 \
4937 IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
4938 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4939 IEM_MC_FETCH_EFLAGS(EFlags); \
4940 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4941 \
4942 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4943 IEM_MC_COMMIT_EFLAGS(EFlags); \
4944 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4945 IEM_MC_END(); \
4946 break; \
4947 \
4948 case IEMMODE_32BIT: \
4949 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4950 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4951 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4952 \
4953 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4954 IEMOP_HLP_DONE_DECODING(); \
4955 \
4956 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4957 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4958 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4959 \
4960 IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
4961 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4962 IEM_MC_FETCH_EFLAGS(EFlags); \
4963 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4964 \
4965 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4966 IEM_MC_COMMIT_EFLAGS(EFlags); \
4967 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4968 IEM_MC_END(); \
4969 break; \
4970 \
4971 case IEMMODE_64BIT: \
4972 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4973 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4974 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4975 \
4976 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4977 IEMOP_HLP_DONE_DECODING(); \
4978 \
4979 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4980 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4981 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4982 \
4983 IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
4984 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4985 IEM_MC_FETCH_EFLAGS(EFlags); \
4986 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4987 \
4988 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4989 IEM_MC_COMMIT_EFLAGS(EFlags); \
4990 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4991 IEM_MC_END(); \
4992 break; \
4993 \
4994 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4995 } \
4996 } \
4997 else \
4998 { \
4999 (void)0
5000/* Separate macro to work around parsing issue in IEMAllInstPython.py */
5001#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
5002 switch (pVCpu->iem.s.enmEffOpSize) \
5003 { \
5004 case IEMMODE_16BIT: \
5005 IEM_MC_BEGIN(3, 3, 0, 0); \
5006 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5007 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5008 \
5009 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5010 IEMOP_HLP_DONE_DECODING(); \
5011 \
5012 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5013 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
5014 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5015 \
5016 IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
5017 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
5018 IEM_MC_FETCH_EFLAGS(EFlags); \
5019 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
5020 \
5021 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
5022 IEM_MC_COMMIT_EFLAGS(EFlags); \
5023 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5024 IEM_MC_END(); \
5025 break; \
5026 \
5027 case IEMMODE_32BIT: \
5028 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
5029 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5030 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5031 \
5032 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5033 IEMOP_HLP_DONE_DECODING(); \
5034 \
5035 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5036 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
5037 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5038 \
5039 IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
5040 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
5041 IEM_MC_FETCH_EFLAGS(EFlags); \
5042 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
5043 \
5044 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
5045 IEM_MC_COMMIT_EFLAGS(EFlags); \
5046 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5047 IEM_MC_END(); \
5048 break; \
5049 \
5050 case IEMMODE_64BIT: \
5051 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
5052 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5053 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5054 \
5055 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5056 IEMOP_HLP_DONE_DECODING(); \
5057 \
5058 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5059 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
5060 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5061 \
5062 IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
5063 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
5064 IEM_MC_FETCH_EFLAGS(EFlags); \
5065 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
5066 \
5067 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
5068 IEM_MC_COMMIT_EFLAGS(EFlags); \
5069 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5070 IEM_MC_END(); \
5071 break; \
5072 \
5073 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5074 } \
5075 } \
5076 } \
5077 (void)0
5078
5079/* read-only variant */
5080#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
5081 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5082 { \
5083 /* \
5084 * Register target \
5085 */ \
5086 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5087 switch (pVCpu->iem.s.enmEffOpSize) \
5088 { \
5089 case IEMMODE_16BIT: \
5090 IEM_MC_BEGIN(3, 0, 0, 0); \
5091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5092 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
5093 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
5094 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
5095 \
5096 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5097 IEM_MC_REF_EFLAGS(pEFlags); \
5098 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
5099 \
5100 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5101 IEM_MC_END(); \
5102 break; \
5103 \
5104 case IEMMODE_32BIT: \
5105 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
5106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5107 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
5108 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
5109 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
5110 \
5111 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5112 IEM_MC_REF_EFLAGS(pEFlags); \
5113 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
5114 \
5115 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5116 IEM_MC_END(); \
5117 break; \
5118 \
5119 case IEMMODE_64BIT: \
5120 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
5121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5122 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
5123 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
5124 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
5125 \
5126 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5127 IEM_MC_REF_EFLAGS(pEFlags); \
5128 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
5129 \
5130 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5131 IEM_MC_END(); \
5132 break; \
5133 \
5134 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5135 } \
5136 } \
5137 else \
5138 { \
5139 /* \
5140 * Memory target. \
5141 */ \
5142 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
5143 { \
5144 switch (pVCpu->iem.s.enmEffOpSize) \
5145 { \
5146 case IEMMODE_16BIT: \
5147 IEM_MC_BEGIN(3, 3, 0, 0); \
5148 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5149 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5150 \
5151 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5152 IEMOP_HLP_DONE_DECODING(); \
5153 \
5154 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5155 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
5156 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5157 \
5158 IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
5159 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
5160 IEM_MC_FETCH_EFLAGS(EFlags); \
5161 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
5162 \
5163 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
5164 IEM_MC_COMMIT_EFLAGS(EFlags); \
5165 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5166 IEM_MC_END(); \
5167 break; \
5168 \
5169 case IEMMODE_32BIT: \
5170 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
5171 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5172 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5173 \
5174 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5175 IEMOP_HLP_DONE_DECODING(); \
5176 \
5177 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5178 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
5179 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5180 \
5181 IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
5182 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
5183 IEM_MC_FETCH_EFLAGS(EFlags); \
5184 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
5185 \
5186 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
5187 IEM_MC_COMMIT_EFLAGS(EFlags); \
5188 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5189 IEM_MC_END(); \
5190 break; \
5191 \
5192 case IEMMODE_64BIT: \
5193 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
5194 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5195 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
5196 \
5197 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
5198 IEMOP_HLP_DONE_DECODING(); \
5199 \
5200 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5201 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
5202 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5203 \
5204 IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
5205 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
5206 IEM_MC_FETCH_EFLAGS(EFlags); \
5207 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
5208 \
5209 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
5210 IEM_MC_COMMIT_EFLAGS(EFlags); \
5211 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5212 IEM_MC_END(); \
5213 break; \
5214 \
5215 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5216 } \
5217 } \
5218 else \
5219 { \
5220 IEMOP_HLP_DONE_DECODING(); \
5221 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
5222 } \
5223 } \
5224 (void)0
5225
5226/**
5227 * @opmaps grp1_83
5228 * @opcode /0
5229 * @opflclass arithmetic
5230 */
5231FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
5232{
5233 IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
5234 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
5235 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
5236}
5237
5238
5239/**
5240 * @opmaps grp1_83
5241 * @opcode /1
5242 * @opflclass logical
5243 */
5244FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
5245{
5246 IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
5247 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
5248 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
5249}
5250
5251
5252/**
5253 * @opmaps grp1_83
5254 * @opcode /2
5255 * @opflclass arithmetic_carry
5256 */
5257FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
5258{
5259 IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
5260 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
5261 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
5262}
5263
5264
5265/**
5266 * @opmaps grp1_83
5267 * @opcode /3
5268 * @opflclass arithmetic_carry
5269 */
5270FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
5271{
5272 IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
5273 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
5274 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
5275}
5276
5277
5278/**
5279 * @opmaps grp1_83
5280 * @opcode /4
5281 * @opflclass logical
5282 */
5283FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
5284{
5285 IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
5286 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
5287 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
5288}
5289
5290
5291/**
5292 * @opmaps grp1_83
5293 * @opcode /5
5294 * @opflclass arithmetic
5295 */
5296FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
5297{
5298 IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
5299 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
5300 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
5301}
5302
5303
5304/**
5305 * @opmaps grp1_83
5306 * @opcode /6
5307 * @opflclass logical
5308 */
5309FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
5310{
5311 IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
5312 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
5313 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
5314}
5315
5316
5317/**
5318 * @opmaps grp1_83
5319 * @opcode /7
5320 * @opflclass arithmetic
5321 */
5322FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
5323{
5324 IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
5325 IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
5326}
5327
5328
5329/**
5330 * @opcode 0x83
5331 */
5332FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
5333{
5334 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
5335 to the 386 even if absent in the intel reference manuals and some
5336 3rd party opcode listings. */
5337 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5338 switch (IEM_GET_MODRM_REG_8(bRm))
5339 {
5340 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
5341 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib, bRm);
5342 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
5343 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
5344 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
5345 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
5346 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
5347 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
5348 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5349 }
5350}
5351
5352
5353/**
5354 * @opcode 0x84
5355 * @opflclass logical
5356 */
5357FNIEMOP_DEF(iemOp_test_Eb_Gb)
5358{
5359 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
5360 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5361
5362 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5363
5364 /*
5365 * Deal with special case of 'test rN, rN' which is frequently used for testing for zero/non-zero registers.
5366 * This block only makes a differences when emitting native code, where we'll save a register fetch.
5367 */
5368 if ( (bRm >> X86_MODRM_REG_SHIFT) == ((bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT))
5369 && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB)
5370 {
5371 IEM_MC_BEGIN(3, 0, 0, 0);
5372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5373 IEM_MC_ARG(uint8_t, u8Src, 1);
5374 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
5375 IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
5376 /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */
5377 IEM_MC_LOCAL(uint32_t, uEFlags);
5378 IEM_MC_FETCH_EFLAGS(uEFlags);
5379 IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u8Src, u8Src, uEFlags, 8);
5380 IEM_MC_COMMIT_EFLAGS(uEFlags);
5381 } IEM_MC_NATIVE_ELSE() {
5382 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5383 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
5384 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5385 IEM_MC_REF_EFLAGS(pEFlags);
5386 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
5387 } IEM_MC_NATIVE_ENDIF();
5388 IEM_MC_ADVANCE_RIP_AND_FINISH();
5389 IEM_MC_END();
5390 }
5391
5392 IEMOP_BODY_BINARY_rm_r8_RO(bRm, iemAImpl_test_u8, test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5393}
5394
5395
5396/**
5397 * @opcode 0x85
5398 * @opflclass logical
5399 */
5400FNIEMOP_DEF(iemOp_test_Ev_Gv)
5401{
5402 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
5403 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5404
5405 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5406
5407 /*
5408 * Deal with special case of 'test rN, rN' which is frequently used for testing for zero/non-zero registers.
5409 * This block only makes a differences when emitting native code, where we'll save a register fetch.
5410 */
5411 if ( (bRm >> X86_MODRM_REG_SHIFT) == ((bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT))
5412 && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB)
5413 {
5414 switch (pVCpu->iem.s.enmEffOpSize)
5415 {
5416 case IEMMODE_16BIT:
5417 IEM_MC_BEGIN(3, 0, 0, 0);
5418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5419 IEM_MC_ARG(uint16_t, u16Src, 1);
5420 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
5421 IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
5422 /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */
5423 IEM_MC_LOCAL(uint32_t, uEFlags);
5424 IEM_MC_FETCH_EFLAGS(uEFlags);
5425 IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u16Src, u16Src, uEFlags, 16);
5426 IEM_MC_COMMIT_EFLAGS(uEFlags);
5427 } IEM_MC_NATIVE_ELSE() {
5428 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5429 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
5430 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5431 IEM_MC_REF_EFLAGS(pEFlags);
5432 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
5433 } IEM_MC_NATIVE_ENDIF();
5434 IEM_MC_ADVANCE_RIP_AND_FINISH();
5435 IEM_MC_END();
5436 break;
5437
5438 case IEMMODE_32BIT:
5439 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
5440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5441 IEM_MC_ARG(uint32_t, u32Src, 1);
5442 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
5443 IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
5444 /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */
5445 IEM_MC_LOCAL(uint32_t, uEFlags);
5446 IEM_MC_FETCH_EFLAGS(uEFlags);
5447 IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u32Src, u32Src, uEFlags, 32);
5448 IEM_MC_COMMIT_EFLAGS(uEFlags);
5449 } IEM_MC_NATIVE_ELSE() {
5450 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5451 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
5452 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5453 IEM_MC_REF_EFLAGS(pEFlags);
5454 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
5455 } IEM_MC_NATIVE_ENDIF();
5456 IEM_MC_ADVANCE_RIP_AND_FINISH();
5457 IEM_MC_END();
5458 break;
5459
5460 case IEMMODE_64BIT:
5461 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
5462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5463 IEM_MC_ARG(uint64_t, u64Src, 1);
5464 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
5465 IEM_MC_NATIVE_IF(RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64) {
5466 /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */
5467 IEM_MC_LOCAL(uint32_t, uEFlags);
5468 IEM_MC_FETCH_EFLAGS(uEFlags);
5469 IEM_MC_NATIVE_EMIT_4(iemNativeEmit_test_r_r_efl, u64Src, u64Src, uEFlags, 64);
5470 IEM_MC_COMMIT_EFLAGS(uEFlags);
5471 } IEM_MC_NATIVE_ELSE() {
5472 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5473 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
5474 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5475 IEM_MC_REF_EFLAGS(pEFlags);
5476 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
5477 } IEM_MC_NATIVE_ENDIF();
5478 IEM_MC_ADVANCE_RIP_AND_FINISH();
5479 IEM_MC_END();
5480 break;
5481
5482 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5483 }
5484 }
5485
5486 IEMOP_BODY_BINARY_rm_rv_RO(bRm, iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
5487}
5488
5489
5490/**
5491 * @opcode 0x86
5492 */
5493FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
5494{
5495 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5496 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
5497
5498 /*
5499 * If rm is denoting a register, no more instruction bytes.
5500 */
5501 if (IEM_IS_MODRM_REG_MODE(bRm))
5502 {
5503 IEM_MC_BEGIN(0, 2, 0, 0);
5504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5505 IEM_MC_LOCAL(uint8_t, uTmp1);
5506 IEM_MC_LOCAL(uint8_t, uTmp2);
5507
5508 IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5509 IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5510 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5511 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5512
5513 IEM_MC_ADVANCE_RIP_AND_FINISH();
5514 IEM_MC_END();
5515 }
5516 else
5517 {
5518 /*
5519 * We're accessing memory.
5520 */
5521#define IEMOP_XCHG_BYTE(a_fnWorker, a_Style) \
5522 IEM_MC_BEGIN(2, 4, 0, 0); \
5523 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5524 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5525 IEM_MC_LOCAL(uint8_t, uTmpReg); \
5526 IEM_MC_ARG(uint8_t *, pu8Mem, 0); \
5527 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1); \
5528 \
5529 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5530 IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
5531 IEM_MC_MEM_MAP_U8_##a_Style(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5532 IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5533 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker, pu8Mem, pu8Reg); \
5534 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Style(bUnmapInfo); \
5535 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5536 \
5537 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5538 IEM_MC_END()
5539
5540 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5541 {
5542 IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_locked,ATOMIC);
5543 }
5544 else
5545 {
5546 IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_unlocked,RW);
5547 }
5548 }
5549}
5550
5551
5552/**
5553 * @opcode 0x87
5554 */
5555FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
5556{
5557 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
5558 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5559
5560 /*
5561 * If rm is denoting a register, no more instruction bytes.
5562 */
5563 if (IEM_IS_MODRM_REG_MODE(bRm))
5564 {
5565 switch (pVCpu->iem.s.enmEffOpSize)
5566 {
5567 case IEMMODE_16BIT:
5568 IEM_MC_BEGIN(0, 2, 0, 0);
5569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5570 IEM_MC_LOCAL(uint16_t, uTmp1);
5571 IEM_MC_LOCAL(uint16_t, uTmp2);
5572
5573 IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5574 IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5575 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5576 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5577
5578 IEM_MC_ADVANCE_RIP_AND_FINISH();
5579 IEM_MC_END();
5580 break;
5581
5582 case IEMMODE_32BIT:
5583 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5585 IEM_MC_LOCAL(uint32_t, uTmp1);
5586 IEM_MC_LOCAL(uint32_t, uTmp2);
5587
5588 IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5589 IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5590 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5591 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5592
5593 IEM_MC_ADVANCE_RIP_AND_FINISH();
5594 IEM_MC_END();
5595 break;
5596
5597 case IEMMODE_64BIT:
5598 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
5599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5600 IEM_MC_LOCAL(uint64_t, uTmp1);
5601 IEM_MC_LOCAL(uint64_t, uTmp2);
5602
5603 IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5604 IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5605 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5606 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5607
5608 IEM_MC_ADVANCE_RIP_AND_FINISH();
5609 IEM_MC_END();
5610 break;
5611
5612 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5613 }
5614 }
5615 else
5616 {
5617 /*
5618 * We're accessing memory.
5619 */
5620#define IEMOP_XCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
5621 do { \
5622 switch (pVCpu->iem.s.enmEffOpSize) \
5623 { \
5624 case IEMMODE_16BIT: \
5625 IEM_MC_BEGIN(2, 4, 0, 0); \
5626 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5627 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5628 IEM_MC_LOCAL(uint16_t, uTmpReg); \
5629 IEM_MC_ARG(uint16_t *, pu16Mem, 0); \
5630 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1); \
5631 \
5632 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5633 IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
5634 IEM_MC_MEM_MAP_U16_##a_Type(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5635 IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5636 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker16, pu16Mem, pu16Reg); \
5637 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
5638 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5639 \
5640 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5641 IEM_MC_END(); \
5642 break; \
5643 \
5644 case IEMMODE_32BIT: \
5645 IEM_MC_BEGIN(2, 4, IEM_MC_F_MIN_386, 0); \
5646 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5647 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5648 IEM_MC_LOCAL(uint32_t, uTmpReg); \
5649 IEM_MC_ARG(uint32_t *, pu32Mem, 0); \
5650 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1); \
5651 \
5652 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5653 IEMOP_HLP_DONE_DECODING(); \
5654 IEM_MC_MEM_MAP_U32_##a_Type(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5655 IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5656 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker32, pu32Mem, pu32Reg); \
5657 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
5658 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5659 \
5660 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5661 IEM_MC_END(); \
5662 break; \
5663 \
5664 case IEMMODE_64BIT: \
5665 IEM_MC_BEGIN(2, 4, IEM_MC_F_64BIT, 0); \
5666 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5667 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5668 IEM_MC_LOCAL(uint64_t, uTmpReg); \
5669 IEM_MC_ARG(uint64_t *, pu64Mem, 0); \
5670 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1); \
5671 \
5672 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5673 IEMOP_HLP_DONE_DECODING(); \
5674 IEM_MC_MEM_MAP_U64_##a_Type(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5675 IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5676 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker64, pu64Mem, pu64Reg); \
5677 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
5678 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5679 \
5680 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5681 IEM_MC_END(); \
5682 break; \
5683 \
5684 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5685 } \
5686 } while (0)
5687 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5688 {
5689 IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_locked, iemAImpl_xchg_u32_locked, iemAImpl_xchg_u64_locked,ATOMIC);
5690 }
5691 else
5692 {
5693 IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_unlocked, iemAImpl_xchg_u32_unlocked, iemAImpl_xchg_u64_unlocked,RW);
5694 }
5695 }
5696}
5697
5698
/**
 * @opcode 0x88
 *
 * MOV Eb,Gb - store the byte register selected by modrm.reg into the byte
 * register or memory location selected by modrm.rm.
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register to register copy. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* consumes any SIB/displacement bytes, so precedes done-decoding */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5738
5739
/**
 * @opcode 0x89
 *
 * MOV Ev,Gv - store the word/dword/qword register selected by modrm.reg into
 * the register or memory location selected by modrm.rm.  One IEM_MC body per
 * effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0); /* 32-bit operand size requires a 386+ */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5836
5837
/**
 * @opcode 0x8a
 *
 * MOV Gb,Eb - load the byte register selected by modrm.reg from the byte
 * register or memory location selected by modrm.rm (mirror of 0x88).
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5876
5877
/**
 * @opcode 0x8b
 *
 * MOV Gv,Ev - load the word/dword/qword register selected by modrm.reg from
 * the register or memory location selected by modrm.rm (mirror of 0x89).
 * Also reused by the 64-bit dispatch of opcode 0x63 (see
 * iemOp_arpl_Ew_Gw_movsx_Gv_Ev below).
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5974
5975
/**
 * opcode 0x63
 *
 * Dispatcher: outside 64-bit mode opcode 0x63 is ARPL; in 64-bit mode it is
 * MOVSXD when the effective operand size is 64-bit, otherwise it is handled
 * like a plain MOV Gv,Ev.  (Defined here, after iemOp_mov_Gv_Ev, so the
 * forwarding call resolves.)
 * @todo Table fixme
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    if (!IEM_IS_64BIT_CODE(pVCpu))
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}
5988
5989
/**
 * @opcode 0x8c
 *
 * MOV Ev,Sw - store the segment register selector selected by modrm.reg into
 * the register or memory location selected by modrm.rm.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    if (iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg); /* zero-extend the 16-bit selector */
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg); /* zero-extend the 16-bit selector */
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint16_t,  u16Value);
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6068
6069
6070
6071
/**
 * @opcode 0x8d
 *
 * LEA Gv,M - store the effective address of the memory operand in the
 * register selected by modrm.reg.  A register-form modrm byte is invalid
 * (\#UD).  For 16/32-bit operand sizes the computed address is truncated to
 * the operand size before being stored.
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /** @todo optimize: This value casting/masking can be skipped if addr-size ==
             *        operand-size, which is usually the case. It'll save an instruction
             *        and a register. */
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc); /* truncate address to 16 bits */
            IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /** @todo optimize: This value casting/masking can be skipped if addr-size ==
             *        operand-size, which is usually the case. It'll save an instruction
             *        and a register. */
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc); /* truncate address to 32 bits */
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc); /* full address, no truncation */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6127
6128
/**
 * @opcode 0x8e
 *
 * MOV Sw,Ev - load the segment register selected by modrm.reg from the
 * word-sized register or memory operand selected by modrm.rm.  Loading CS
 * this way is invalid (\#UD); the actual segment load is done by the
 * iemCImpl_load_SReg C implementation.
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    /** @todo r=bird: What does 8086 do here wrt CS? */
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     *
     * Note! Using IEMOP_MOV_SW_EV_REG_BODY here to specify different
     *       IEM_CIMPL_F_XXX values depending on the CPU mode and target
     *       register. This is a restriction of the current recompiler
     *       approach.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The CIMPL call declares all four shadow-copied segment register
           components (selector, base, limit, attributes) as modified. */
#define IEMOP_MOV_SW_EV_REG_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 0, 0, a_fCImplFlags); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t,      u16Value,          1); \
            IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                  RT_BIT_64(kIemNativeGstReg_SegSelFirst    + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
                                iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        /* Loading SS inhibits interrupts for one instruction; loading
           SS/DS/ES in 32-bit code can change the effective mode flags. */
        if (iSegReg == X86_SREG_SS)
        {
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_REG_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_REG_BODY
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#define IEMOP_MOV_SW_EV_MEM_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 1, 0, a_fCImplFlags); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t,      u16Value,          1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                  RT_BIT_64(kIemNativeGstReg_SegSelFirst    + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
                                iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        /* Same CIMPL flag matrix as the register form above. */
        if (iSegReg == X86_SREG_SS)
        {
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_MEM_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_MEM_BODY
    }
}
6244
6245
/** Opcode 0x8f /0.
 *
 * POP Ev - pop a word/dword/qword off the stack into the register or memory
 * operand.  The tricky part (see comments below) is that rSP must be treated
 * as already incremented when the memory operand's effective address uses it;
 * the (cb << 8) argument to IEM_MC_CALC_RM_EFF_ADDR conveys that adjustment.
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEM_MC_ARG(RTGCPTR,      GCPtrEffDst,                           1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8); /* 2 = pop size, biases rSP in the EA calc */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEM_MC_ARG(RTGCPTR,      GCPtrEffDst,                           1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8); /* 4 = pop size */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR,      GCPtrEffDst,                           1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8); /* 8 = pop size */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR     GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
6377
6378
/**
 * @opcode 0x8f
 *
 * Group 1A / XOP dispatcher: modrm.reg == 0 is POP Ev; the other reg values
 * form AMD's three-byte XOP prefix (when the CPU reports XOP support and no
 * conflicting prefixes were seen).
 */
FNIEMOP_DEF(iemOp_Grp1A__xop)
{
    /*
     * AMD has defined /1 thru /7 as XOP prefix.  The prefix is similar to the
     * three byte VEX prefix, except that the mmmmm field cannot have the values
     * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    IEMOP_MNEMONIC(xop, "xop");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
    {
        /** @todo Test when exctly the XOP conformance checks kick in during
         * instruction decoding and fetching (using \#PF). */
        uint8_t bXop2;   IEM_OPCODE_GET_NEXT_U8(&bXop2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        if (   (  pVCpu->iem.s.fPrefixes
                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0) /* legacy size/rep/lock/REX prefixes invalidate the XOP encoding */
        {
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
            if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* The R/X/B/vvvv fields are stored inverted in the prefix bytes. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bXop2 & 0x3;

            /** @todo XOP: Just use new tables and decoders. */
            switch (bRm & 0x1f) /* the mmmmm opcode-map field */
            {
                case 8: /* xop opcode map 8. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 9: /* xop opcode map 9. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 10: /* xop opcode map 10. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default:
                    Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        else
            Log(("XOP: Invalid prefix mix!\n"));
    }
    else
        Log(("XOP: XOP support disabled!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
6441
6442
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Exchanges the general register given by @a iReg (after OR-ing in REX.B)
 * with rAX at the current effective operand size.  Used by opcodes
 * 0x90 (with REX.B) thru 0x97.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    iReg |= pVCpu->iem.s.uRexB; /* REX.B extends the register number to r8..r15 */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg,         u16Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg,         u32Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg,         u64Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6493
6494
/**
 * @opcode 0x90
 *
 * NOP - but with REX.B it becomes 'xchg r8,rAX', and with an F3 (here seen as
 * IEM_OP_PRF_LOCK in fPrefixes) it is PAUSE, which may need VM-exit handling
 * when executing a nested guest.
 */
FNIEMOP_DEF(iemOp_nop)
{
    /* R8/R8D and RAX/EAX can be exchanged. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
    {
        IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
        return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        IEMOP_MNEMONIC(pause, "pause");
        /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
           and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
        if (!IEM_IS_IN_GUEST(pVCpu))
        { /* probable */ }
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
        else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmx_pause);
#endif
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
        else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_svm_pause);
#endif
    }
    else
        IEMOP_MNEMONIC(nop, "nop");
    /** @todo testcase: lock nop; lock pause */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6531
6532
/**
 * @opcode 0x91
 * xchg rCX,rAX (rCX may become r9 via REX.B, applied in the common helper).
 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
6541
6542
/**
 * @opcode 0x92
 * xchg rDX,rAX (rDX may become r10 via REX.B, applied in the common helper).
 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
6551
6552
/**
 * @opcode 0x93
 * xchg rBX,rAX (rBX may become r11 via REX.B, applied in the common helper).
 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
6561
6562
6563/**
6564 * @opcode 0x94
6565 */
6566FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6567{
6568 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6569 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6570}
6571
6572
/**
 * @opcode 0x95
 * xchg rBP,rAX (rBP may become r13 via REX.B, applied in the common helper).
 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
6581
6582
/**
 * @opcode 0x96
 * xchg rSI,rAX (rSI may become r14 via REX.B, applied in the common helper).
 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
6591
6592
/**
 * @opcode 0x97
 * xchg rDI,rAX (rDI may become r15 via REX.B, applied in the common helper).
 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
6601
6602
/**
 * @opcode 0x98
 *
 * CBW/CWDE/CDQE - sign-extend AL into AX, AX into EAX, or EAX into RAX,
 * depending on the effective operand size.  Implemented by testing the sign
 * bit of the source and OR-ing/AND-ing in the extension mask.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) { /* sign bit of AL */
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) { /* sign bit of AX */
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) { /* sign bit of EAX */
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6652
6653
/**
 * @opcode 0x99
 *
 * CWD/CDQ/CQO - sign-extend AX/EAX/RAX into DX/EDX/RDX, i.e. fill the
 * destination with all-ones or all-zeros depending on the sign bit of the
 * accumulator.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) { /* sign bit of AX */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) { /* sign bit of EAX */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) { /* sign bit of RAX */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6703
6704
/**
 * @opcode 0x9a
 *
 * CALL Ap - direct far call with an immediate sel:offset pointer; invalid in
 * 64-bit mode.  Decodes the pointer and defers the heavy lifting to
 * iemCImpl_callf.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg); /* 16-bit offset, zero-extended */
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
                                iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
6726
6727
/** Opcode 0x9b. (aka fwait)
 *
 * WAIT/FWAIT - raises a device-not-available or pending FPU exception if
 * applicable, otherwise a no-op.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6739
6740
/**
 * @opcode 0x9c
 *
 * PUSHF Fv - push the flags register; deferred to iemCImpl_pushf, which
 * modifies rSP (hence the xSP register bit in the clobber mask).
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
                                iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
6752
6753
/**
 * @opcode 0x9d
 *
 * POPF Fv - pop the flags register; deferred to iemCImpl_popf.  Flags changes
 * (e.g. IF) require an IRQ check before and after, hence the extra CIMPL
 * flags compared to pushf.
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_MNEMONIC(popf_Fv, "popf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
                                iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
6766
6767
6768/**
6769 * @opcode 0x9e
6770 * @opflmodify cf,pf,af,zf,sf
6771 */
6772FNIEMOP_DEF(iemOp_sahf)
6773{
6774 IEMOP_MNEMONIC(sahf, "sahf");
6775 if ( IEM_IS_64BIT_CODE(pVCpu)
6776 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6777 IEMOP_RAISE_INVALID_OPCODE_RET();
6778 IEM_MC_BEGIN(0, 2, 0, 0);
6779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6780 IEM_MC_LOCAL(uint32_t, u32Flags);
6781 IEM_MC_LOCAL(uint32_t, EFlags);
6782 IEM_MC_FETCH_EFLAGS(EFlags);
6783 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
6784 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6785 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
6786 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
6787 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
6788 IEM_MC_COMMIT_EFLAGS(EFlags);
6789 IEM_MC_ADVANCE_RIP_AND_FINISH();
6790 IEM_MC_END();
6791}
6792
6793
6794/**
6795 * @opcode 0x9f
6796 * @opfltest cf,pf,af,zf,sf
6797 */
6798FNIEMOP_DEF(iemOp_lahf)
6799{
6800 IEMOP_MNEMONIC(lahf, "lahf");
6801 if ( IEM_IS_64BIT_CODE(pVCpu)
6802 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6803 IEMOP_RAISE_INVALID_OPCODE_RET();
6804 IEM_MC_BEGIN(0, 1, 0, 0);
6805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6806 IEM_MC_LOCAL(uint8_t, u8Flags);
6807 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
6808 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
6809 IEM_MC_ADVANCE_RIP_AND_FINISH();
6810 IEM_MC_END();
6811}
6812
6813
6814/**
6815 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
6816 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
6817 * Will return/throw on failures.
6818 * @param a_GCPtrMemOff The variable to store the offset in.
6819 */
6820#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
6821 do \
6822 { \
6823 switch (pVCpu->iem.s.enmEffAddrMode) \
6824 { \
6825 case IEMMODE_16BIT: \
6826 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
6827 break; \
6828 case IEMMODE_32BIT: \
6829 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
6830 break; \
6831 case IEMMODE_64BIT: \
6832 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
6833 break; \
6834 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
6835 } \
6836 } while (0)
6837
6838/**
6839 * @opcode 0xa0
6840 */
6841FNIEMOP_DEF(iemOp_mov_AL_Ob)
6842{
6843 /*
6844 * Get the offset.
6845 */
6846 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
6847 RTGCPTR GCPtrMemOffDecode;
6848 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
6849
6850 /*
6851 * Fetch AL.
6852 */
6853 IEM_MC_BEGIN(0, 2, 0, 0);
6854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6855 IEM_MC_LOCAL(uint8_t, u8Tmp);
6856 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6857 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6858 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
6859 IEM_MC_ADVANCE_RIP_AND_FINISH();
6860 IEM_MC_END();
6861}
6862
6863
6864/**
6865 * @opcode 0xa1
6866 */
6867FNIEMOP_DEF(iemOp_mov_rAX_Ov)
6868{
6869 /*
6870 * Get the offset.
6871 */
6872 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
6873 RTGCPTR GCPtrMemOffDecode;
6874 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
6875
6876 /*
6877 * Fetch rAX.
6878 */
6879 switch (pVCpu->iem.s.enmEffOpSize)
6880 {
6881 case IEMMODE_16BIT:
6882 IEM_MC_BEGIN(0, 2, 0, 0);
6883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6884 IEM_MC_LOCAL(uint16_t, u16Tmp);
6885 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6886 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6887 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
6888 IEM_MC_ADVANCE_RIP_AND_FINISH();
6889 IEM_MC_END();
6890 break;
6891
6892 case IEMMODE_32BIT:
6893 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
6894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6895 IEM_MC_LOCAL(uint32_t, u32Tmp);
6896 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6897 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6898 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
6899 IEM_MC_ADVANCE_RIP_AND_FINISH();
6900 IEM_MC_END();
6901 break;
6902
6903 case IEMMODE_64BIT:
6904 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6906 IEM_MC_LOCAL(uint64_t, u64Tmp);
6907 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6908 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6909 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
6910 IEM_MC_ADVANCE_RIP_AND_FINISH();
6911 IEM_MC_END();
6912 break;
6913
6914 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6915 }
6916}
6917
6918
6919/**
6920 * @opcode 0xa2
6921 */
6922FNIEMOP_DEF(iemOp_mov_Ob_AL)
6923{
6924 /*
6925 * Get the offset.
6926 */
6927 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
6928 RTGCPTR GCPtrMemOffDecode;
6929 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
6930
6931 /*
6932 * Store AL.
6933 */
6934 IEM_MC_BEGIN(0, 2, 0, 0);
6935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6936 IEM_MC_LOCAL(uint8_t, u8Tmp);
6937 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
6938 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6939 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
6940 IEM_MC_ADVANCE_RIP_AND_FINISH();
6941 IEM_MC_END();
6942}
6943
6944
6945/**
6946 * @opcode 0xa3
6947 */
6948FNIEMOP_DEF(iemOp_mov_Ov_rAX)
6949{
6950 /*
6951 * Get the offset.
6952 */
6953 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
6954 RTGCPTR GCPtrMemOffDecode;
6955 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
6956
6957 /*
6958 * Store rAX.
6959 */
6960 switch (pVCpu->iem.s.enmEffOpSize)
6961 {
6962 case IEMMODE_16BIT:
6963 IEM_MC_BEGIN(0, 2, 0, 0);
6964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6965 IEM_MC_LOCAL(uint16_t, u16Tmp);
6966 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
6967 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6968 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
6969 IEM_MC_ADVANCE_RIP_AND_FINISH();
6970 IEM_MC_END();
6971 break;
6972
6973 case IEMMODE_32BIT:
6974 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
6975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6976 IEM_MC_LOCAL(uint32_t, u32Tmp);
6977 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
6978 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6979 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
6980 IEM_MC_ADVANCE_RIP_AND_FINISH();
6981 IEM_MC_END();
6982 break;
6983
6984 case IEMMODE_64BIT:
6985 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6986 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6987 IEM_MC_LOCAL(uint64_t, u64Tmp);
6988 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
6989 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6990 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
6991 IEM_MC_ADVANCE_RIP_AND_FINISH();
6992 IEM_MC_END();
6993 break;
6994
6995 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6996 }
6997}
6998
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Emits one non-REP MOVS body: load from [iEffSeg:rSI], store to [ES:rDI],
 * then step rSI/rDI by ValBits/8, down when EFLAGS.DF is set, up otherwise.
 * @param ValBits    Element width in bits (8/16/32/64).
 * @param AddrBits   Effective address width in bits (16/32/64).
 * @param a_fMcFlags IEM_MC_F_XXX flags for IEM_MC_BEGIN. */
#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
7018
7019/**
7020 * @opcode 0xa4
7021 * @opfltest df
7022 */
7023FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
7024{
7025 /*
7026 * Use the C implementation if a repeat prefix is encountered.
7027 */
7028 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7029 {
7030 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
7031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7032 switch (pVCpu->iem.s.enmEffAddrMode)
7033 {
7034 case IEMMODE_16BIT:
7035 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7036 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7037 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7038 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7039 iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
7040 case IEMMODE_32BIT:
7041 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7042 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7043 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7044 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7045 iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
7046 case IEMMODE_64BIT:
7047 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7048 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7049 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7050 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7051 iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
7052 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7053 }
7054 }
7055
7056 /*
7057 * Sharing case implementation with movs[wdq] below.
7058 */
7059 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
7060 switch (pVCpu->iem.s.enmEffAddrMode)
7061 {
7062 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7063 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7064 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
7065 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7066 }
7067}
7068
7069
7070/**
7071 * @opcode 0xa5
7072 * @opfltest df
7073 */
7074FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
7075{
7076
7077 /*
7078 * Use the C implementation if a repeat prefix is encountered.
7079 */
7080 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7081 {
7082 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
7083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7084 switch (pVCpu->iem.s.enmEffOpSize)
7085 {
7086 case IEMMODE_16BIT:
7087 switch (pVCpu->iem.s.enmEffAddrMode)
7088 {
7089 case IEMMODE_16BIT:
7090 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7091 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7092 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7093 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7094 iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
7095 case IEMMODE_32BIT:
7096 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7097 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7098 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7099 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7100 iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
7101 case IEMMODE_64BIT:
7102 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7103 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7104 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7105 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7106 iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
7107 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7108 }
7109 break;
7110 case IEMMODE_32BIT:
7111 switch (pVCpu->iem.s.enmEffAddrMode)
7112 {
7113 case IEMMODE_16BIT:
7114 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7115 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7116 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7117 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7118 iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
7119 case IEMMODE_32BIT:
7120 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7121 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7122 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7123 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7124 iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
7125 case IEMMODE_64BIT:
7126 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7127 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7128 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7129 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7130 iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
7131 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7132 }
7133 case IEMMODE_64BIT:
7134 switch (pVCpu->iem.s.enmEffAddrMode)
7135 {
7136 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
7137 case IEMMODE_32BIT:
7138 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7139 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7140 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7141 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7142 iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
7143 case IEMMODE_64BIT:
7144 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7145 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7146 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7147 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7148 iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
7149 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7150 }
7151 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7152 }
7153 }
7154
7155 /*
7156 * Annoying double switch here.
7157 * Using ugly macro for implementing the cases, sharing it with movsb.
7158 */
7159 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
7160 switch (pVCpu->iem.s.enmEffOpSize)
7161 {
7162 case IEMMODE_16BIT:
7163 switch (pVCpu->iem.s.enmEffAddrMode)
7164 {
7165 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7166 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7167 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
7168 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7169 }
7170 break;
7171
7172 case IEMMODE_32BIT:
7173 switch (pVCpu->iem.s.enmEffAddrMode)
7174 {
7175 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7176 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7177 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
7178 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7179 }
7180 break;
7181
7182 case IEMMODE_64BIT:
7183 switch (pVCpu->iem.s.enmEffAddrMode)
7184 {
7185 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7186 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
7187 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
7188 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7189 }
7190 break;
7191 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7192 }
7193}
7194
7195#undef IEM_MOVS_CASE
7196
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 * Emits one non-REP CMPS body: compares [iEffSeg:rSI] with [ES:rDI] via the
 * cmp assembly worker (updating EFLAGS), then steps rSI/rDI by ValBits/8,
 * down when EFLAGS.DF is set, up otherwise.
 * @param ValBits    Element width in bits (8/16/32/64).
 * @param AddrBits   Effective address width in bits (16/32/64).
 * @param a_fMcFlags IEM_MC_F_XXX flags for IEM_MC_BEGIN. */
#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(3, 3, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        \
        IEM_MC_LOCAL(RTGCPTR,           uAddr1); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr1, X86_GREG_xSI); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr1); \
        \
        IEM_MC_LOCAL(RTGCPTR,           uAddr2); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr2, X86_GREG_xDI); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,                 1); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr2); \
        \
        IEM_MC_ARG(uint32_t *,          pEFlags,                 2); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_ARG_LOCAL_REF(uint##ValBits##_t *, puValue1, uValue1, 0); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
7226
7227/**
7228 * @opcode 0xa6
7229 * @opflclass arithmetic
7230 * @opfltest df
7231 */
7232FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
7233{
7234
7235 /*
7236 * Use the C implementation if a repeat prefix is encountered.
7237 */
7238 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7239 {
7240 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
7241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7242 switch (pVCpu->iem.s.enmEffAddrMode)
7243 {
7244 case IEMMODE_16BIT:
7245 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7246 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7247 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7248 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7249 iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
7250 case IEMMODE_32BIT:
7251 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7252 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7253 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7254 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7255 iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
7256 case IEMMODE_64BIT:
7257 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7258 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7259 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7260 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7261 iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
7262 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7263 }
7264 }
7265 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7266 {
7267 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
7268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7269 switch (pVCpu->iem.s.enmEffAddrMode)
7270 {
7271 case IEMMODE_16BIT:
7272 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7273 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7274 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7275 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7276 iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
7277 case IEMMODE_32BIT:
7278 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7279 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7280 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7281 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7282 iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
7283 case IEMMODE_64BIT:
7284 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7285 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7286 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7287 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7288 iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
7289 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7290 }
7291 }
7292
7293 /*
7294 * Sharing case implementation with cmps[wdq] below.
7295 */
7296 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
7297 switch (pVCpu->iem.s.enmEffAddrMode)
7298 {
7299 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7300 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7301 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
7302 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7303 }
7304}
7305
7306
7307/**
7308 * @opcode 0xa7
7309 * @opflclass arithmetic
7310 * @opfltest df
7311 */
7312FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
7313{
7314 /*
7315 * Use the C implementation if a repeat prefix is encountered.
7316 */
7317 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7318 {
7319 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
7320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7321 switch (pVCpu->iem.s.enmEffOpSize)
7322 {
7323 case IEMMODE_16BIT:
7324 switch (pVCpu->iem.s.enmEffAddrMode)
7325 {
7326 case IEMMODE_16BIT:
7327 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7328 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7329 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7330 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7331 iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7332 case IEMMODE_32BIT:
7333 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7334 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7335 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7336 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7337 iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7338 case IEMMODE_64BIT:
7339 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7340 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7341 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7342 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7343 iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7344 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7345 }
7346 break;
7347 case IEMMODE_32BIT:
7348 switch (pVCpu->iem.s.enmEffAddrMode)
7349 {
7350 case IEMMODE_16BIT:
7351 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7352 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7353 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7354 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7355 iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7356 case IEMMODE_32BIT:
7357 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7358 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7359 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7360 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7361 iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7362 case IEMMODE_64BIT:
7363 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7364 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7365 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7366 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7367 iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7368 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7369 }
7370 case IEMMODE_64BIT:
7371 switch (pVCpu->iem.s.enmEffAddrMode)
7372 {
7373 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
7374 case IEMMODE_32BIT:
7375 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7376 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7377 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7378 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7379 iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7380 case IEMMODE_64BIT:
7381 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7382 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7383 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7384 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7385 iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7386 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7387 }
7388 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7389 }
7390 }
7391
7392 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7393 {
7394 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
7395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7396 switch (pVCpu->iem.s.enmEffOpSize)
7397 {
7398 case IEMMODE_16BIT:
7399 switch (pVCpu->iem.s.enmEffAddrMode)
7400 {
7401 case IEMMODE_16BIT:
7402 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7403 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7404 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7405 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7406 iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7407 case IEMMODE_32BIT:
7408 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7409 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7410 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7411 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7412 iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7413 case IEMMODE_64BIT:
7414 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7415 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7416 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7417 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7418 iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7419 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7420 }
7421 break;
7422 case IEMMODE_32BIT:
7423 switch (pVCpu->iem.s.enmEffAddrMode)
7424 {
7425 case IEMMODE_16BIT:
7426 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7427 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7428 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7429 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7430 iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7431 case IEMMODE_32BIT:
7432 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7433 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7434 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7435 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7436 iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7437 case IEMMODE_64BIT:
7438 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7439 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7440 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7441 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7442 iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7443 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7444 }
7445 case IEMMODE_64BIT:
7446 switch (pVCpu->iem.s.enmEffAddrMode)
7447 {
7448 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
7449 case IEMMODE_32BIT:
7450 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7451 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7452 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7453 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7454 iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7455 case IEMMODE_64BIT:
7456 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7457 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7458 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7459 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7460 iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7461 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7462 }
7463 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7464 }
7465 }
7466
7467 /*
7468 * Annoying double switch here.
7469 * Using ugly macro for implementing the cases, sharing it with cmpsb.
7470 */
7471 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
7472 switch (pVCpu->iem.s.enmEffOpSize)
7473 {
7474 case IEMMODE_16BIT:
7475 switch (pVCpu->iem.s.enmEffAddrMode)
7476 {
7477 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7478 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7479 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
7480 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7481 }
7482 break;
7483
7484 case IEMMODE_32BIT:
7485 switch (pVCpu->iem.s.enmEffAddrMode)
7486 {
7487 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7488 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7489 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
7490 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7491 }
7492 break;
7493
7494 case IEMMODE_64BIT:
7495 switch (pVCpu->iem.s.enmEffAddrMode)
7496 {
7497 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7498 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_MIN_386); break;
7499 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
7500 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7501 }
7502 break;
7503 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7504 }
7505}
7506
7507#undef IEM_CMPS_CASE
7508
7509/**
7510 * @opcode 0xa8
7511 * @opflclass logical
7512 */
7513FNIEMOP_DEF(iemOp_test_AL_Ib)
7514{
7515 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
7516 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7517 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
7518}
7519
7520
7521/**
7522 * @opcode 0xa9
7523 * @opflclass logical
7524 */
7525FNIEMOP_DEF(iemOp_test_eAX_Iz)
7526{
7527 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
7528 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7529 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
7530}
7531
7532
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 * Emits one non-REP STOS body: stores rAX's low ValBits to [ES:rDI], then
 * steps rDI by ValBits/8, down when EFLAGS.DF is set, up otherwise.
 * @param ValBits    Element width in bits (8/16/32/64).
 * @param AddrBits   Effective address width in bits (16/32/64).
 * @param a_fMcFlags IEM_MC_F_XXX flags for IEM_MC_BEGIN. */
#define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr,  X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
7549
7550/**
7551 * @opcode 0xaa
7552 */
7553FNIEMOP_DEF(iemOp_stosb_Yb_AL)
7554{
7555 /*
7556 * Use the C implementation if a repeat prefix is encountered.
7557 */
7558 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7559 {
7560 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
7561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7562 switch (pVCpu->iem.s.enmEffAddrMode)
7563 {
7564 case IEMMODE_16BIT:
7565 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7566 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7567 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7568 iemCImpl_stos_al_m16);
7569 case IEMMODE_32BIT:
7570 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7571 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7572 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7573 iemCImpl_stos_al_m32);
7574 case IEMMODE_64BIT:
7575 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7576 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7577 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7578 iemCImpl_stos_al_m64);
7579 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7580 }
7581 }
7582
7583 /*
7584 * Sharing case implementation with stos[wdq] below.
7585 */
7586 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
7587 switch (pVCpu->iem.s.enmEffAddrMode)
7588 {
7589 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7590 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7591 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
7592 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7593 }
7594}
7595
7596
7597/**
7598 * @opcode 0xab
7599 */
7600FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
7601{
7602 /*
7603 * Use the C implementation if a repeat prefix is encountered.
7604 */
7605 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7606 {
7607 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
7608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7609 switch (pVCpu->iem.s.enmEffOpSize)
7610 {
7611 case IEMMODE_16BIT:
7612 switch (pVCpu->iem.s.enmEffAddrMode)
7613 {
7614 case IEMMODE_16BIT:
7615 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7616 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7617 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7618 iemCImpl_stos_ax_m16);
7619 case IEMMODE_32BIT:
7620 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7621 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7622 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7623 iemCImpl_stos_ax_m32);
7624 case IEMMODE_64BIT:
7625 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7626 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7627 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7628 iemCImpl_stos_ax_m64);
7629 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7630 }
7631 break;
7632 case IEMMODE_32BIT:
7633 switch (pVCpu->iem.s.enmEffAddrMode)
7634 {
7635 case IEMMODE_16BIT:
7636 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7637 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7638 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7639 iemCImpl_stos_eax_m16);
7640 case IEMMODE_32BIT:
7641 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7642 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7643 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7644 iemCImpl_stos_eax_m32);
7645 case IEMMODE_64BIT:
7646 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7647 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7648 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7649 iemCImpl_stos_eax_m64);
7650 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7651 }
7652 case IEMMODE_64BIT:
7653 switch (pVCpu->iem.s.enmEffAddrMode)
7654 {
7655 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
7656 case IEMMODE_32BIT:
7657 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7658 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7659 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7660 iemCImpl_stos_rax_m32);
7661 case IEMMODE_64BIT:
7662 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7663 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7664 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7665 iemCImpl_stos_rax_m64);
7666 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7667 }
7668 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7669 }
7670 }
7671
7672 /*
7673 * Annoying double switch here.
7674 * Using ugly macro for implementing the cases, sharing it with stosb.
7675 */
7676 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7677 switch (pVCpu->iem.s.enmEffOpSize)
7678 {
7679 case IEMMODE_16BIT:
7680 switch (pVCpu->iem.s.enmEffAddrMode)
7681 {
7682 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7683 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7684 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
7685 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7686 }
7687 break;
7688
7689 case IEMMODE_32BIT:
7690 switch (pVCpu->iem.s.enmEffAddrMode)
7691 {
7692 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7693 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7694 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
7695 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7696 }
7697 break;
7698
7699 case IEMMODE_64BIT:
7700 switch (pVCpu->iem.s.enmEffAddrMode)
7701 {
7702 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7703 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
7704 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
7705 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7706 }
7707 break;
7708 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7709 }
7710}
7711
7712#undef IEM_STOS_CASE
7713
/** Body macro shared by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv: loads
 * ValBits bits from [iEffSeg:xSI] into xAX and steps xSI by ValBits/8,
 * backwards when EFLAGS.DF is set (segment override honoured via iEffSeg). */
#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7730
7731/**
7732 * @opcode 0xac
7733 * @opfltest df
7734 */
7735FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
7736{
7737 /*
7738 * Use the C implementation if a repeat prefix is encountered.
7739 */
7740 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7741 {
7742 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
7743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7744 switch (pVCpu->iem.s.enmEffAddrMode)
7745 {
7746 case IEMMODE_16BIT:
7747 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7748 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7749 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7750 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7751 iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
7752 case IEMMODE_32BIT:
7753 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7754 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7755 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7756 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7757 iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
7758 case IEMMODE_64BIT:
7759 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7760 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7761 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7762 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7763 iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
7764 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7765 }
7766 }
7767
7768 /*
7769 * Sharing case implementation with stos[wdq] below.
7770 */
7771 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
7772 switch (pVCpu->iem.s.enmEffAddrMode)
7773 {
7774 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7775 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7776 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
7777 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7778 }
7779}
7780
7781
7782/**
7783 * @opcode 0xad
7784 * @opfltest df
7785 */
7786FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
7787{
7788 /*
7789 * Use the C implementation if a repeat prefix is encountered.
7790 */
7791 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7792 {
7793 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
7794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7795 switch (pVCpu->iem.s.enmEffOpSize)
7796 {
7797 case IEMMODE_16BIT:
7798 switch (pVCpu->iem.s.enmEffAddrMode)
7799 {
7800 case IEMMODE_16BIT:
7801 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7802 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7803 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7804 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7805 iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
7806 case IEMMODE_32BIT:
7807 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7808 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7809 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7810 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7811 iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
7812 case IEMMODE_64BIT:
7813 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7814 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7815 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7816 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7817 iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
7818 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7819 }
7820 break;
7821 case IEMMODE_32BIT:
7822 switch (pVCpu->iem.s.enmEffAddrMode)
7823 {
7824 case IEMMODE_16BIT:
7825 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7826 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7827 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7828 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7829 iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
7830 case IEMMODE_32BIT:
7831 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7832 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7833 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7834 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7835 iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
7836 case IEMMODE_64BIT:
7837 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7838 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7839 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7840 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7841 iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
7842 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7843 }
7844 case IEMMODE_64BIT:
7845 switch (pVCpu->iem.s.enmEffAddrMode)
7846 {
7847 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
7848 case IEMMODE_32BIT:
7849 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7850 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7851 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7852 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7853 iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
7854 case IEMMODE_64BIT:
7855 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7856 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7857 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7858 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7859 iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
7860 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7861 }
7862 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7863 }
7864 }
7865
7866 /*
7867 * Annoying double switch here.
7868 * Using ugly macro for implementing the cases, sharing it with lodsb.
7869 */
7870 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
7871 switch (pVCpu->iem.s.enmEffOpSize)
7872 {
7873 case IEMMODE_16BIT:
7874 switch (pVCpu->iem.s.enmEffAddrMode)
7875 {
7876 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7877 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7878 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
7879 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7880 }
7881 break;
7882
7883 case IEMMODE_32BIT:
7884 switch (pVCpu->iem.s.enmEffAddrMode)
7885 {
7886 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7887 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7888 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
7889 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7890 }
7891 break;
7892
7893 case IEMMODE_64BIT:
7894 switch (pVCpu->iem.s.enmEffAddrMode)
7895 {
7896 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7897 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
7898 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
7899 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7900 }
7901 break;
7902 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7903 }
7904}
7905
7906#undef IEM_LODS_CASE
7907
/** Body macro shared by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv: compares
 * xAX against ValBits bits at [ES:xDI] via iemAImpl_cmp_u##ValBits (which
 * updates the arithmetic EFLAGS) and steps xDI by ValBits/8, backwards when
 * EFLAGS.DF is set.  Always uses ES - no segment override for scas. */
#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
7930
7931/**
7932 * @opcode 0xae
7933 * @opflclass arithmetic
7934 * @opfltest df
7935 */
7936FNIEMOP_DEF(iemOp_scasb_AL_Xb)
7937{
7938 /*
7939 * Use the C implementation if a repeat prefix is encountered.
7940 */
7941 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7942 {
7943 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
7944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7945 switch (pVCpu->iem.s.enmEffAddrMode)
7946 {
7947 case IEMMODE_16BIT:
7948 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7949 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7950 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7951 iemCImpl_repe_scas_al_m16);
7952 case IEMMODE_32BIT:
7953 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7954 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7955 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7956 iemCImpl_repe_scas_al_m32);
7957 case IEMMODE_64BIT:
7958 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7959 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7960 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7961 iemCImpl_repe_scas_al_m64);
7962 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7963 }
7964 }
7965 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7966 {
7967 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
7968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7969 switch (pVCpu->iem.s.enmEffAddrMode)
7970 {
7971 case IEMMODE_16BIT:
7972 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7973 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7974 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7975 iemCImpl_repne_scas_al_m16);
7976 case IEMMODE_32BIT:
7977 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7978 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7979 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7980 iemCImpl_repne_scas_al_m32);
7981 case IEMMODE_64BIT:
7982 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7983 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7984 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7985 iemCImpl_repne_scas_al_m64);
7986 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7987 }
7988 }
7989
7990 /*
7991 * Sharing case implementation with stos[wdq] below.
7992 */
7993 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
7994 switch (pVCpu->iem.s.enmEffAddrMode)
7995 {
7996 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7997 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7998 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
7999 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8000 }
8001}
8002
8003
8004/**
8005 * @opcode 0xaf
8006 * @opflclass arithmetic
8007 * @opfltest df
8008 */
8009FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
8010{
8011 /*
8012 * Use the C implementation if a repeat prefix is encountered.
8013 */
8014 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
8015 {
8016 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
8017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8018 switch (pVCpu->iem.s.enmEffOpSize)
8019 {
8020 case IEMMODE_16BIT:
8021 switch (pVCpu->iem.s.enmEffAddrMode)
8022 {
8023 case IEMMODE_16BIT:
8024 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8025 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8026 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8027 iemCImpl_repe_scas_ax_m16);
8028 case IEMMODE_32BIT:
8029 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8030 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8031 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8032 iemCImpl_repe_scas_ax_m32);
8033 case IEMMODE_64BIT:
8034 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8035 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8036 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8037 iemCImpl_repe_scas_ax_m64);
8038 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8039 }
8040 break;
8041 case IEMMODE_32BIT:
8042 switch (pVCpu->iem.s.enmEffAddrMode)
8043 {
8044 case IEMMODE_16BIT:
8045 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8046 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8047 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8048 iemCImpl_repe_scas_eax_m16);
8049 case IEMMODE_32BIT:
8050 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8051 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8052 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8053 iemCImpl_repe_scas_eax_m32);
8054 case IEMMODE_64BIT:
8055 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8056 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8057 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8058 iemCImpl_repe_scas_eax_m64);
8059 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8060 }
8061 case IEMMODE_64BIT:
8062 switch (pVCpu->iem.s.enmEffAddrMode)
8063 {
8064 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
8065 case IEMMODE_32BIT:
8066 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8067 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8068 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8069 iemCImpl_repe_scas_rax_m32);
8070 case IEMMODE_64BIT:
8071 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8072 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8073 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8074 iemCImpl_repe_scas_rax_m64);
8075 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8076 }
8077 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8078 }
8079 }
8080 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
8081 {
8082 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
8083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8084 switch (pVCpu->iem.s.enmEffOpSize)
8085 {
8086 case IEMMODE_16BIT:
8087 switch (pVCpu->iem.s.enmEffAddrMode)
8088 {
8089 case IEMMODE_16BIT:
8090 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8091 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8092 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8093 iemCImpl_repne_scas_ax_m16);
8094 case IEMMODE_32BIT:
8095 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8096 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8097 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8098 iemCImpl_repne_scas_ax_m32);
8099 case IEMMODE_64BIT:
8100 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8101 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8102 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8103 iemCImpl_repne_scas_ax_m64);
8104 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8105 }
8106 break;
8107 case IEMMODE_32BIT:
8108 switch (pVCpu->iem.s.enmEffAddrMode)
8109 {
8110 case IEMMODE_16BIT:
8111 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8112 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8113 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8114 iemCImpl_repne_scas_eax_m16);
8115 case IEMMODE_32BIT:
8116 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8117 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8118 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8119 iemCImpl_repne_scas_eax_m32);
8120 case IEMMODE_64BIT:
8121 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8122 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8123 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8124 iemCImpl_repne_scas_eax_m64);
8125 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8126 }
8127 case IEMMODE_64BIT:
8128 switch (pVCpu->iem.s.enmEffAddrMode)
8129 {
8130 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
8131 case IEMMODE_32BIT:
8132 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8133 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8134 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8135 iemCImpl_repne_scas_rax_m32);
8136 case IEMMODE_64BIT:
8137 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
8138 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
8139 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
8140 iemCImpl_repne_scas_rax_m64);
8141 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8142 }
8143 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8144 }
8145 }
8146
8147 /*
8148 * Annoying double switch here.
8149 * Using ugly macro for implementing the cases, sharing it with scasb.
8150 */
8151 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
8152 switch (pVCpu->iem.s.enmEffOpSize)
8153 {
8154 case IEMMODE_16BIT:
8155 switch (pVCpu->iem.s.enmEffAddrMode)
8156 {
8157 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
8158 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
8159 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
8160 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8161 }
8162 break;
8163
8164 case IEMMODE_32BIT:
8165 switch (pVCpu->iem.s.enmEffAddrMode)
8166 {
8167 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
8168 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
8169 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
8170 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8171 }
8172 break;
8173
8174 case IEMMODE_64BIT:
8175 switch (pVCpu->iem.s.enmEffAddrMode)
8176 {
8177 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
8178 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
8179 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
8180 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8181 }
8182 break;
8183 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8184 }
8185}
8186
8187#undef IEM_SCAS_CASE
8188
8189/**
8190 * Common 'mov r8, imm8' helper.
8191 */
8192FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
8193{
8194 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8195 IEM_MC_BEGIN(0, 0, 0, 0);
8196 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8197 IEM_MC_STORE_GREG_U8_CONST(iFixedReg, u8Imm);
8198 IEM_MC_ADVANCE_RIP_AND_FINISH();
8199 IEM_MC_END();
8200}
8201
8202
8203/**
8204 * @opcode 0xb0
8205 */
8206FNIEMOP_DEF(iemOp_mov_AL_Ib)
8207{
8208 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
8209 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8210}
8211
8212
8213/**
8214 * @opcode 0xb1
8215 */
8216FNIEMOP_DEF(iemOp_CL_Ib)
8217{
8218 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
8219 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8220}
8221
8222
8223/**
8224 * @opcode 0xb2
8225 */
8226FNIEMOP_DEF(iemOp_DL_Ib)
8227{
8228 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
8229 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8230}
8231
8232
8233/**
8234 * @opcode 0xb3
8235 */
8236FNIEMOP_DEF(iemOp_BL_Ib)
8237{
8238 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
8239 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8240}
8241
8242
8243/**
8244 * @opcode 0xb4
8245 */
8246FNIEMOP_DEF(iemOp_mov_AH_Ib)
8247{
8248 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
8249 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8250}
8251
8252
8253/**
8254 * @opcode 0xb5
8255 */
8256FNIEMOP_DEF(iemOp_CH_Ib)
8257{
8258 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
8259 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8260}
8261
8262
8263/**
8264 * @opcode 0xb6
8265 */
8266FNIEMOP_DEF(iemOp_DH_Ib)
8267{
8268 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
8269 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8270}
8271
8272
8273/**
8274 * @opcode 0xb7
8275 */
8276FNIEMOP_DEF(iemOp_BH_Ib)
8277{
8278 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
8279 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8280}
8281
8282
8283/**
8284 * Common 'mov regX,immX' helper.
8285 */
8286FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
8287{
8288 switch (pVCpu->iem.s.enmEffOpSize)
8289 {
8290 case IEMMODE_16BIT:
8291 IEM_MC_BEGIN(0, 0, 0, 0);
8292 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8294 IEM_MC_STORE_GREG_U16_CONST(iFixedReg, u16Imm);
8295 IEM_MC_ADVANCE_RIP_AND_FINISH();
8296 IEM_MC_END();
8297 break;
8298
8299 case IEMMODE_32BIT:
8300 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8301 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8303 IEM_MC_STORE_GREG_U32_CONST(iFixedReg, u32Imm);
8304 IEM_MC_ADVANCE_RIP_AND_FINISH();
8305 IEM_MC_END();
8306 break;
8307
8308 case IEMMODE_64BIT:
8309 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
8310 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
8311 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8312 IEM_MC_STORE_GREG_U64_CONST(iFixedReg, u64Imm);
8313 IEM_MC_ADVANCE_RIP_AND_FINISH();
8314 IEM_MC_END();
8315 break;
8316 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8317 }
8318}
8319
8320
8321/**
8322 * @opcode 0xb8
8323 */
8324FNIEMOP_DEF(iemOp_eAX_Iv)
8325{
8326 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
8327 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8328}
8329
8330
8331/**
8332 * @opcode 0xb9
8333 */
8334FNIEMOP_DEF(iemOp_eCX_Iv)
8335{
8336 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
8337 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8338}
8339
8340
8341/**
8342 * @opcode 0xba
8343 */
8344FNIEMOP_DEF(iemOp_eDX_Iv)
8345{
8346 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
8347 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8348}
8349
8350
8351/**
8352 * @opcode 0xbb
8353 */
8354FNIEMOP_DEF(iemOp_eBX_Iv)
8355{
8356 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
8357 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8358}
8359
8360
8361/**
8362 * @opcode 0xbc
8363 */
8364FNIEMOP_DEF(iemOp_eSP_Iv)
8365{
8366 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
8367 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8368}
8369
8370
8371/**
8372 * @opcode 0xbd
8373 */
8374FNIEMOP_DEF(iemOp_eBP_Iv)
8375{
8376 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
8377 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8378}
8379
8380
8381/**
8382 * @opcode 0xbe
8383 */
8384FNIEMOP_DEF(iemOp_eSI_Iv)
8385{
8386 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
8387 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8388}
8389
8390
8391/**
8392 * @opcode 0xbf
8393 */
8394FNIEMOP_DEF(iemOp_eDI_Iv)
8395{
8396 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
8397 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8398}
8399
8400
8401/**
8402 * @opcode 0xc0
8403 */
8404FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
8405{
8406 IEMOP_HLP_MIN_186();
8407 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8408
8409 /* Need to use a body macro here since the EFLAGS behaviour differs between
8410 the shifts, rotates and rotate w/ carry. Sigh. */
8411#define GRP2_BODY_Eb_Ib(a_pImplExpr) \
8412 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
8413 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8414 { \
8415 /* register */ \
8416 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8417 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0); \
8418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8419 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
8420 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8421 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8422 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8423 IEM_MC_REF_EFLAGS(pEFlags); \
8424 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
8425 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8426 IEM_MC_END(); \
8427 } \
8428 else \
8429 { \
8430 /* memory */ \
8431 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_186, 0); \
8432 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8433 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
8434 \
8435 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8437 \
8438 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8439 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
8440 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8441 \
8442 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8443 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
8444 IEM_MC_FETCH_EFLAGS(EFlags); \
8445 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
8446 \
8447 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8448 IEM_MC_COMMIT_EFLAGS(EFlags); \
8449 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8450 IEM_MC_END(); \
8451 } (void)0
8452
8453 switch (IEM_GET_MODRM_REG_8(bRm))
8454 {
8455 /**
8456 * @opdone
8457 * @opmaps grp2_c0
8458 * @opcode /0
8459 * @opflclass rotate_count
8460 */
8461 case 0:
8462 {
8463 IEMOP_MNEMONIC2(MI, ROL, rol, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8464 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8465 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
8466 break;
8467 }
8468 /**
8469 * @opdone
8470 * @opmaps grp2_c0
8471 * @opcode /1
8472 * @opflclass rotate_count
8473 */
8474 case 1:
8475 {
8476 IEMOP_MNEMONIC2(MI, ROR, ror, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8477 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8478 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
8479 break;
8480 }
8481 /**
8482 * @opdone
8483 * @opmaps grp2_c0
8484 * @opcode /2
8485 * @opflclass rotate_carry_count
8486 */
8487 case 2:
8488 {
8489 IEMOP_MNEMONIC2(MI, RCL, rcl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8490 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8491 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
8492 break;
8493 }
8494 /**
8495 * @opdone
8496 * @opmaps grp2_c0
8497 * @opcode /3
8498 * @opflclass rotate_carry_count
8499 */
8500 case 3:
8501 {
8502 IEMOP_MNEMONIC2(MI, RCR, rcr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8503 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8504 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
8505 break;
8506 }
8507 /**
8508 * @opdone
8509 * @opmaps grp2_c0
8510 * @opcode /4
8511 * @opflclass shift_count
8512 */
8513 case 4:
8514 {
8515 IEMOP_MNEMONIC2(MI, SHL, shl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8516 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8517 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
8518 break;
8519 }
8520 /**
8521 * @opdone
8522 * @opmaps grp2_c0
8523 * @opcode /5
8524 * @opflclass shift_count
8525 */
8526 case 5:
8527 {
8528 IEMOP_MNEMONIC2(MI, SHR, shr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8529 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8530 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
8531 break;
8532 }
8533 /**
8534 * @opdone
8535 * @opmaps grp2_c0
8536 * @opcode /7
8537 * @opflclass shift_count
8538 */
8539 case 7:
8540 {
8541 IEMOP_MNEMONIC2(MI, SAR, sar, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8542 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8543 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
8544 break;
8545 }
8546
8547 /** @opdone */
8548 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8549 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8550 }
8551#undef GRP2_BODY_Eb_Ib
8552}
8553
8554
8555/* Need to use a body macro here since the EFLAGS behaviour differs between
8556 the shifts, rotates and rotate w/ carry. Sigh. */
8557#define GRP2_BODY_Ev_Ib(a_pImplExpr) \
8558 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
8559 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8560 { \
8561 /* register */ \
8562 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8563 switch (pVCpu->iem.s.enmEffOpSize) \
8564 { \
8565 case IEMMODE_16BIT: \
8566 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0); \
8567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8568 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
8569 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8570 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8571 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8572 IEM_MC_REF_EFLAGS(pEFlags); \
8573 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
8574 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8575 IEM_MC_END(); \
8576 break; \
8577 \
8578 case IEMMODE_32BIT: \
8579 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
8580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8581 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
8582 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8583 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8584 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8585 IEM_MC_REF_EFLAGS(pEFlags); \
8586 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
8587 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
8588 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8589 IEM_MC_END(); \
8590 break; \
8591 \
8592 case IEMMODE_64BIT: \
8593 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
8594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8595 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
8596 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8597 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8598 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8599 IEM_MC_REF_EFLAGS(pEFlags); \
8600 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
8601 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8602 IEM_MC_END(); \
8603 break; \
8604 \
8605 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
8606 } \
8607 } \
8608 else \
8609 { \
8610 /* memory */ \
8611 switch (pVCpu->iem.s.enmEffOpSize) \
8612 { \
8613 case IEMMODE_16BIT: \
8614 IEM_MC_BEGIN(3, 3, 0, 0); \
8615 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8616 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
8617 \
8618 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8620 \
8621 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8622 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
8623 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8624 \
8625 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8626 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
8627 IEM_MC_FETCH_EFLAGS(EFlags); \
8628 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
8629 \
8630 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8631 IEM_MC_COMMIT_EFLAGS(EFlags); \
8632 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8633 IEM_MC_END(); \
8634 break; \
8635 \
8636 case IEMMODE_32BIT: \
8637 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
8638 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8639 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
8640 \
8641 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8643 \
8644 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8645 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
8646 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8647 \
8648 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8649 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
8650 IEM_MC_FETCH_EFLAGS(EFlags); \
8651 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
8652 \
8653 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8654 IEM_MC_COMMIT_EFLAGS(EFlags); \
8655 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8656 IEM_MC_END(); \
8657 break; \
8658 \
8659 case IEMMODE_64BIT: \
8660 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
8661 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8662 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
8663 \
8664 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8666 \
8667 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8668 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
8669 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8670 \
8671 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8672 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
8673 IEM_MC_FETCH_EFLAGS(EFlags); \
8674 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
8675 \
8676 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8677 IEM_MC_COMMIT_EFLAGS(EFlags); \
8678 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8679 IEM_MC_END(); \
8680 break; \
8681 \
8682 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
8683 } \
8684 } (void)0
8685
/**
 * ROL Ev,Ib - rotate left by an immediate count.
 *
 * @opmaps grp2_c1
 * @opcode /0
 * @opflclass rotate_count
 */
FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, ROL, rol, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    /* EFLAGS calculation variant is selected per target guest CPU. */
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
}
8696
8697
/**
 * ROR Ev,Ib - rotate right by an immediate count.
 *
 * @opmaps grp2_c1
 * @opcode /1
 * @opflclass rotate_count
 */
FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, ROR, ror, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    /* EFLAGS calculation variant is selected per target guest CPU. */
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
}
8708
8709
/**
 * RCL Ev,Ib - rotate left through carry by an immediate count.
 *
 * @opmaps grp2_c1
 * @opcode /2
 * @opflclass rotate_carry_count
 */
FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, RCL, rcl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    /* EFLAGS calculation variant is selected per target guest CPU. */
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
}
8720
8721
/**
 * RCR Ev,Ib - rotate right through carry by an immediate count.
 *
 * @opmaps grp2_c1
 * @opcode /3
 * @opflclass rotate_carry_count
 */
FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, RCR, rcr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    /* EFLAGS calculation variant is selected per target guest CPU. */
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
}
8732
8733
/**
 * SHL Ev,Ib - shift left by an immediate count.
 *
 * @opmaps grp2_c1
 * @opcode /4
 * @opflclass shift_count
 */
FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, SHL, shl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    /* EFLAGS calculation variant is selected per target guest CPU. */
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
}
8744
8745
/**
 * SHR Ev,Ib - logical shift right by an immediate count.
 *
 * @opmaps grp2_c1
 * @opcode /5
 * @opflclass shift_count
 */
FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, SHR, shr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    /* EFLAGS calculation variant is selected per target guest CPU. */
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
}
8756
8757
/**
 * SAR Ev,Ib - arithmetic shift right by an immediate count.
 *
 * @opmaps grp2_c1
 * @opcode /7
 * @opflclass shift_count
 */
FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, SAR, sar, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    /* EFLAGS calculation variant is selected per target guest CPU. */
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
}
8768
8769#undef GRP2_BODY_Ev_Ib
8770
/**
 * Group 2 dispatcher: shift/rotate Ev by an imm8 count.
 *
 * Dispatches on the ModR/M reg field to the individual instruction
 * decoders; /6 is undefined in this group (raises \#UD).
 *
 * @opcode 0xc1
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186(); /* 0xc1 was introduced with the 80186. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_Ib, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_Ib, bRm);
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is not assigned in group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
}
8792
8793
/**
 * RETN Iw - near return, then add Iw to the stack pointer.
 *
 * Defers to an operand-size specific C implementation; flagged as an
 * indirect branch that also modifies the stack pointer.
 *
 * @opcode 0xc2
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_16, u16Imm);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_32, u16Imm);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_64, u16Imm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8817
8818
8819/**
8820 * @opcode 0xc3
8821 */
8822FNIEMOP_DEF(iemOp_retn)
8823{
8824 IEMOP_MNEMONIC(retn, "retn");
8825 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8827 switch (pVCpu->iem.s.enmEffOpSize)
8828 {
8829 case IEMMODE_16BIT:
8830 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8831 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_16);
8832 case IEMMODE_32BIT:
8833 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8834 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_32);
8835 case IEMMODE_64BIT:
8836 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8837 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_64);
8838 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8839 }
8840}
8841
8842
/**
 * LES Gv,Mp / three-byte VEX prefix (0xc4).
 *
 * @opcode 0xc4
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
{
    /* The LES instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode.  VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm) )
    {
        /* Decode as a three-byte VEX prefix. */
        IEMOP_MNEMONIC(vex3_prefix, "vex3");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            if (IEM_IS_64BIT_CODE(pVCpu))
            {
#if 1
                /* Branch-free translation of VEX.W and the inverted VEX.R/X/B
                   bits into the prefix flag word (bit positions verified by
                   the AssertCompiles). */
                AssertCompile(IEM_OP_PRF_SIZE_REX_W == RT_BIT_32(9));
                pVCpu->iem.s.fPrefixes |= (uint32_t)(bVex2 & 0x80) << (9 - 7);
                AssertCompile(IEM_OP_PRF_REX_B == RT_BIT_32(25) && IEM_OP_PRF_REX_X == RT_BIT_32(26) && IEM_OP_PRF_REX_R == RT_BIT_32(27));
                pVCpu->iem.s.fPrefixes |= (uint32_t)(~bRm & 0xe0) << (25 - 5);
#else
                if (bVex2 & 0x80 /* VEX.W */)
                    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
                if (~bRm & 0x20 /* VEX.~B */)
                    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_B;
                if (~bRm & 0x40 /* VEX.~X */)
                    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_X;
                if (~bRm & 0x80 /* VEX.~R */)
                    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_R;
#endif
            }
            /* Extract the remaining VEX payload fields (R/X/B and vvvv are
               stored inverted in the encoding). */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;

            /* VEX.mmmmm selects the opcode map; only maps 1..3 are defined. */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 2: /* 0x0f 0x38 lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 3: /* 0x0f 0x3a lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                default:
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        Log(("VEX3: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    /* Legacy LES Gv,Mp decoding. */
    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
8928
8929
/**
 * LDS Gv,Mp / two-byte VEX prefix (0xc5).
 *
 * @opcode 0xc5
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
{
    /* The LDS instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Decode as a two-byte VEX prefix. */
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            AssertCompile(IEM_OP_PRF_REX_R == RT_BIT_32(27));
            pVCpu->iem.s.fPrefixes |= (uint32_t)(~bRm & 0x80) << (27 - 7);
            /* R and vvvv are stored inverted in the encoding. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bRm & 0x3;

            /* Two-byte VEX always implies the 0x0f opcode map. */
#ifdef IEM_WITH_VEX
            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
            IEMOP_BITCH_ABOUT_STUB();
            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
        }

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        Log(("VEX2: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    /* Legacy LDS Gv,Mp decoding. */
    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
}
8975
8976
/**
 * Group 11: MOV Eb,Ib - store an immediate byte to register or memory.
 *
 * Only /0 (mov) is defined in this group; all other reg values raise \#UD.
 *
 * @opcode 0xc6
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access.  Effective address is calculated before the imm8
           is fetched, matching the instruction byte order. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9010
9011
/**
 * Group 11: MOV Ev,Iz - store an immediate word/dword to register or memory.
 *
 * Only /0 (mov) is defined in this group; all other reg values raise \#UD.
 * In 64-bit mode the immediate is a sign-extended imm32.
 *
 * @opcode 0xc7
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* sign-extended imm32 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access.  Effective address is calculated before the
           immediate is fetched (2 or 4 trailing immediate bytes). */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* sign-extended imm32 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9099
9100
9101
9102
/**
 * ENTER Iw,Ib - create a stack frame of Iw bytes with Ib nesting levels.
 *
 * Deferred to a C implementation; the modified-register mask covers
 * rSP and rBP, which the implementation updates.
 *
 * @opcode 0xc8
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186(); /* ENTER was introduced with the 80186. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(0,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
                                iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
9119
9120
/**
 * LEAVE - release the stack frame set up by ENTER.
 *
 * Deferred to a C implementation; the modified-register mask covers
 * rSP and rBP, which the implementation updates.
 *
 * @opcode 0xc9
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186(); /* LEAVE was introduced with the 80186. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(0,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
                                iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
9135
9136
/**
 * RETF Iw - far return, then add Iw to the stack pointer.
 *
 * Deferred to a C implementation.  A far return can change CPL/mode, in
 * which case the data segment registers may be sanitized - hence the
 * large modified-register mask covering DS/ES/FS/GS selector, base,
 * limit and attributes in addition to rSP.
 *
 * @opcode 0xca
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
9166
9167
/**
 * RETF - far return without stack adjustment.
 *
 * Same as RETF Iw with a zero pop count; see iemOp_retf_Iw for why the
 * modified-register mask covers all the data segment registers.
 *
 * @opcode 0xcb
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
9196
9197
/**
 * INT3 - breakpoint exception (\#BP).
 *
 * Deferred to the common software-interrupt C implementation with the
 * dedicated INT3 dispatch mode.
 *
 * @opcode 0xcc
 */
FNIEMOP_DEF(iemOp_int3)
{
    IEMOP_MNEMONIC(int3, "int3");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
}
9209
9210
/**
 * INT Ib - software interrupt with an immediate vector.
 *
 * Deferred to the common software-interrupt C implementation; any guest
 * register may be affected (UINT64_MAX modified-register mask).
 *
 * @opcode 0xcd
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    IEMOP_MNEMONIC(int_Ib, "int Ib");
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS, UINT64_MAX,
                                iemCImpl_int, u8Int, IEMINT_INTN);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
9224
9225
/**
 * INTO - raise \#OF if the overflow flag is set; invalid in 64-bit mode.
 *
 * Deferred to the common software-interrupt C implementation; the branch
 * is conditional on OF, hence IEM_CIMPL_F_BRANCH_CONDITIONAL.
 *
 * @opcode 0xce
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT(); /* 0xce raises #UD in 64-bit mode. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_BRANCH_CONDITIONAL | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                UINT64_MAX,
                                iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
9239
9240
/**
 * IRET - return from interrupt.
 *
 * Deferred to a C implementation.  Checks for pending IRQs before
 * executing and may cause a VM exit or mode change.
 *
 * @opcode 0xcf
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
                                iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
    /* Segment registers are sanitized when returning to an outer ring, or fully
       reloaded when returning to v86 mode.  Thus the large flush list above. */
}
9271
9272
/**
 * Group 2: shift/rotate Eb by 1.
 *
 * Dispatches on the ModR/M reg field; /6 is undefined (raises \#UD).
 * The shared body macro below supplies the common register/memory
 * handling while the per-instruction EFLAGS implementation table is
 * passed in as a_pImplExpr.
 *
 * @opcode 0xd0
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Need to use a body macro here since the EFLAGS behaviour differs between
       the shifts, rotates and rotate w/ carry. Sigh. */
#define GRP2_BODY_Eb_1(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory: map the byte read-write, call the worker, then commit both the byte and EFLAGS. */ \
        IEM_MC_BEGIN(3, 3, 0, 0); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1); \
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_FETCH_EFLAGS(EFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        \
        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
        IEM_MC_COMMIT_EFLAGS(EFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } (void)0

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /0
         * @opflclass rotate_1
         */
        case 0:
        {
            IEMOP_MNEMONIC2(M1, ROL, rol, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /1
         * @opflclass rotate_1
         */
        case 1:
        {
            IEMOP_MNEMONIC2(M1, ROR, ror, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /2
         * @opflclass rotate_carry_1
         */
        case 2:
        {
            IEMOP_MNEMONIC2(M1, RCL, rcl, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /3
         * @opflclass rotate_carry_1
         */
        case 3:
        {
            IEMOP_MNEMONIC2(M1, RCR, rcr, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /4
         * @opflclass shift_1
         */
        case 4:
        {
            IEMOP_MNEMONIC2(M1, SHL, shl, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /5
         * @opflclass shift_1
         */
        case 5:
        {
            IEMOP_MNEMONIC2(M1, SHR, shr, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /7
         * @opflclass shift_1
         */
        case 7:
        {
            IEMOP_MNEMONIC2(M1, SAR, sar, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
            break;
        }
        /** @opdone */
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is not assigned in group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
#undef GRP2_BODY_Eb_1
}
9412
9413
/* Need to use a body macro here since the EFLAGS behaviour differs between
   the shifts, rotates and rotate w/ carry. Sigh.

   Common body for the group 2 Ev,1 instructions (0xd1 /0../5 and /7) with
   a fixed shift count of one.  a_pImplExpr supplies the per-instruction
   EFLAGS worker table (PCIEMOPSHIFTSIZES).  Register and memory forms
   each dispatch on the effective operand size; the memory operand is
   mapped read-write and committed together with EFLAGS after the call. */
#define GRP2_BODY_Ev_1(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); /* 32-bit op zero-extends the 64-bit GPR */ \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 3, 0, 0); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
9534
9535/**
9536 * @opmaps grp2_d1
9537 * @opcode /0
9538 * @opflclass rotate_1
9539 */
9540FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_1, uint8_t, bRm)
9541{
9542 IEMOP_MNEMONIC2(M1, ROL, rol, Ev, 1, DISOPTYPE_HARMLESS, 0);
9543 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9544}
9545
9546
9547/**
9548 * @opmaps grp2_d1
9549 * @opcode /1
9550 * @opflclass rotate_1
9551 */
9552FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_1, uint8_t, bRm)
9553{
9554 IEMOP_MNEMONIC2(M1, ROR, ror, Ev, 1, DISOPTYPE_HARMLESS, 0);
9555 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9556}
9557
9558
9559/**
9560 * @opmaps grp2_d1
9561 * @opcode /2
9562 * @opflclass rotate_carry_1
9563 */
9564FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_1, uint8_t, bRm)
9565{
9566 IEMOP_MNEMONIC2(M1, RCL, rcl, Ev, 1, DISOPTYPE_HARMLESS, 0);
9567 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9568}
9569
9570
9571/**
9572 * @opmaps grp2_d1
9573 * @opcode /3
9574 * @opflclass rotate_carry_1
9575 */
9576FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_1, uint8_t, bRm)
9577{
9578 IEMOP_MNEMONIC2(M1, RCR, rcr, Ev, 1, DISOPTYPE_HARMLESS, 0);
9579 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9580}
9581
9582
9583/**
9584 * @opmaps grp2_d1
9585 * @opcode /4
9586 * @opflclass shift_1
9587 */
9588FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_1, uint8_t, bRm)
9589{
9590 IEMOP_MNEMONIC2(M1, SHL, shl, Ev, 1, DISOPTYPE_HARMLESS, 0);
9591 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9592}
9593
9594
9595/**
9596 * @opmaps grp2_d1
9597 * @opcode /5
9598 * @opflclass shift_1
9599 */
9600FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_1, uint8_t, bRm)
9601{
9602 IEMOP_MNEMONIC2(M1, SHR, shr, Ev, 1, DISOPTYPE_HARMLESS, 0);
9603 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
9604}
9605
9606
9607/**
9608 * @opmaps grp2_d1
9609 * @opcode /7
9610 * @opflclass shift_1
9611 */
9612FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_1, uint8_t, bRm)
9613{
9614 IEMOP_MNEMONIC2(M1, SAR, sar, Ev, 1, DISOPTYPE_HARMLESS, 0);
9615 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
9616}
9617
9618#undef GRP2_BODY_Ev_1
9619
9620/**
9621 * @opcode 0xd1
9622 */
9623FNIEMOP_DEF(iemOp_Grp2_Ev_1)
9624{
9625 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9626 switch (IEM_GET_MODRM_REG_8(bRm))
9627 {
9628 case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_1, bRm);
9629 case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_1, bRm);
9630 case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_1, bRm);
9631 case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_1, bRm);
9632 case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_1, bRm);
9633 case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_1, bRm);
9634 case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_1, bRm);
9635 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9636 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
9637 }
9638}
9639
9640
9641/**
9642 * @opcode 0xd2
9643 */
9644FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
9645{
9646 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9647
9648 /* Need to use a body macro here since the EFLAGS behaviour differs between
9649 the shifts, rotates and rotate w/ carry. Sigh. */
9650#define GRP2_BODY_Eb_CL(a_pImplExpr) \
9651 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
9652 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9653 { \
9654 /* register */ \
9655 IEM_MC_BEGIN(3, 0, 0, 0); \
9656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9657 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
9658 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9659 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9660 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9661 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9662 IEM_MC_REF_EFLAGS(pEFlags); \
9663 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
9664 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9665 IEM_MC_END(); \
9666 } \
9667 else \
9668 { \
9669 /* memory */ \
9670 IEM_MC_BEGIN(3, 3, 0, 0); \
9671 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
9672 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9673 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9674 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9675 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9676 \
9677 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9679 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9680 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9681 IEM_MC_FETCH_EFLAGS(EFlags); \
9682 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
9683 \
9684 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9685 IEM_MC_COMMIT_EFLAGS(EFlags); \
9686 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9687 IEM_MC_END(); \
9688 } (void)0
9689
9690 switch (IEM_GET_MODRM_REG_8(bRm))
9691 {
9692 /**
9693 * @opdone
9694 * @opmaps grp2_d0
9695 * @opcode /0
9696 * @opflclass rotate_count
9697 */
9698 case 0:
9699 {
9700 IEMOP_MNEMONIC2EX(rol_Eb_CL, "rol Eb,CL", M_CL, ROL, rol, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9701 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9702 break;
9703 }
9704 /**
9705 * @opdone
9706 * @opmaps grp2_d0
9707 * @opcode /1
9708 * @opflclass rotate_count
9709 */
9710 case 1:
9711 {
9712 IEMOP_MNEMONIC2EX(ror_Eb_CL, "ror Eb,CL", M_CL, ROR, ror, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9713 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9714 break;
9715 }
9716 /**
9717 * @opdone
9718 * @opmaps grp2_d0
9719 * @opcode /2
9720 * @opflclass rotate_carry_count
9721 */
9722 case 2:
9723 {
9724 IEMOP_MNEMONIC2EX(rcl_Eb_CL, "rcl Eb,CL", M_CL, RCL, rcl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9725 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9726 break;
9727 }
9728 /**
9729 * @opdone
9730 * @opmaps grp2_d0
9731 * @opcode /3
9732 * @opflclass rotate_carry_count
9733 */
9734 case 3:
9735 {
9736 IEMOP_MNEMONIC2EX(rcr_Eb_CL, "rcr Eb,CL", M_CL, RCR, rcr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9737 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9738 break;
9739 }
9740 /**
9741 * @opdone
9742 * @opmaps grp2_d0
9743 * @opcode /4
9744 * @opflclass shift_count
9745 */
9746 case 4:
9747 {
9748 IEMOP_MNEMONIC2EX(shl_Eb_CL, "shl Eb,CL", M_CL, SHL, shl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9749 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9750 break;
9751 }
9752 /**
9753 * @opdone
9754 * @opmaps grp2_d0
9755 * @opcode /5
9756 * @opflclass shift_count
9757 */
9758 case 5:
9759 {
9760 IEMOP_MNEMONIC2EX(shr_Eb_CL, "shr Eb,CL", M_CL, SHR, shr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9761 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
9762 break;
9763 }
9764 /**
9765 * @opdone
9766 * @opmaps grp2_d0
9767 * @opcode /7
9768 * @opflclass shift_count
9769 */
9770 case 7:
9771 {
9772 IEMOP_MNEMONIC2EX(sar_Eb_CL, "sar Eb,CL", M_CL, SAR, sar, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9773 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
9774 break;
9775 }
9776 /** @opdone */
9777 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9778 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
9779 }
9780#undef GRP2_BODY_Eb_CL
9781}
9782
9783
/* Need to use a body macro here since the EFLAGS behaviour differs between
   the shifts, rotates and rotate w/ carry. Sigh.

   Shifts/rotates Ev (r/m16/32/64) by the count in CL. Emits the
   register-direct forms and the read-modify-write memory forms for all three
   operand sizes; the 32-bit register form clears the upper half of the
   destination GPR (IEM_MC_CLEAR_HIGH_GREG_U64) as the architecture requires.
   The IEM_MC statement order is significant and must not be changed. */
#define GRP2_BODY_Ev_CL(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 3, 0, 0); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
9910
9911
9912/**
9913 * @opmaps grp2_d0
9914 * @opcode /0
9915 * @opflclass rotate_count
9916 */
9917FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_CL, uint8_t, bRm)
9918{
9919 IEMOP_MNEMONIC2EX(rol_Ev_CL, "rol Ev,CL", M_CL, ROL, rol, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9920 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9921}
9922
9923
9924/**
9925 * @opmaps grp2_d0
9926 * @opcode /1
9927 * @opflclass rotate_count
9928 */
9929FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_CL, uint8_t, bRm)
9930{
9931 IEMOP_MNEMONIC2EX(ror_Ev_CL, "ror Ev,CL", M_CL, ROR, ror, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9932 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9933}
9934
9935
9936/**
9937 * @opmaps grp2_d0
9938 * @opcode /2
9939 * @opflclass rotate_carry_count
9940 */
9941FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_CL, uint8_t, bRm)
9942{
9943 IEMOP_MNEMONIC2EX(rcl_Ev_CL, "rcl Ev,CL", M_CL, RCL, rcl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9944 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9945}
9946
9947
9948/**
9949 * @opmaps grp2_d0
9950 * @opcode /3
9951 * @opflclass rotate_carry_count
9952 */
9953FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_CL, uint8_t, bRm)
9954{
9955 IEMOP_MNEMONIC2EX(rcr_Ev_CL, "rcr Ev,CL", M_CL, RCR, rcr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9956 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9957}
9958
9959
9960/**
9961 * @opmaps grp2_d0
9962 * @opcode /4
9963 * @opflclass shift_count
9964 */
9965FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_CL, uint8_t, bRm)
9966{
9967 IEMOP_MNEMONIC2EX(shl_Ev_CL, "shl Ev,CL", M_CL, SHL, shl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9968 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9969}
9970
9971
9972/**
9973 * @opmaps grp2_d0
9974 * @opcode /5
9975 * @opflclass shift_count
9976 */
9977FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_CL, uint8_t, bRm)
9978{
9979 IEMOP_MNEMONIC2EX(shr_Ev_CL, "shr Ev,CL", M_CL, SHR, shr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9980 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
9981}
9982
9983
9984/**
9985 * @opmaps grp2_d0
9986 * @opcode /7
9987 * @opflclass shift_count
9988 */
9989FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_CL, uint8_t, bRm)
9990{
9991 IEMOP_MNEMONIC2EX(sar_Ev_CL, "sar Ev,CL", M_CL, SAR, sar, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9992 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
9993}
9994
9995#undef GRP2_BODY_Ev_CL
9996
9997/**
9998 * @opcode 0xd3
9999 */
10000FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
10001{
10002 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10003 switch (IEM_GET_MODRM_REG_8(bRm))
10004 {
10005 case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_CL, bRm);
10006 case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_CL, bRm);
10007 case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_CL, bRm);
10008 case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_CL, bRm);
10009 case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_CL, bRm);
10010 case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_CL, bRm);
10011 case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_CL, bRm);
10012 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10013 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
10014 }
10015}
10016
10017
10018/**
10019 * @opcode 0xd4
10020 * @opflmodify cf,pf,af,zf,sf,of
10021 * @opflundef cf,af,of
10022 */
10023FNIEMOP_DEF(iemOp_aam_Ib)
10024{
10025/** @todo testcase: aam */
10026 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
10027 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
10028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10029 IEMOP_HLP_NO_64BIT();
10030 if (!bImm)
10031 IEMOP_RAISE_DIVIDE_ERROR_RET();
10032 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aam, bImm);
10033}
10034
10035
10036/**
10037 * @opcode 0xd5
10038 * @opflmodify cf,pf,af,zf,sf,of
10039 * @opflundef cf,af,of
10040 */
10041FNIEMOP_DEF(iemOp_aad_Ib)
10042{
10043/** @todo testcase: aad? */
10044 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
10045 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
10046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10047 IEMOP_HLP_NO_64BIT();
10048 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aad, bImm);
10049}
10050
10051
10052/**
10053 * @opcode 0xd6
10054 */
10055FNIEMOP_DEF(iemOp_salc)
10056{
10057 IEMOP_MNEMONIC(salc, "salc");
10058 IEMOP_HLP_NO_64BIT();
10059
10060 IEM_MC_BEGIN(0, 0, 0, 0);
10061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10062 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
10063 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
10064 } IEM_MC_ELSE() {
10065 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
10066 } IEM_MC_ENDIF();
10067 IEM_MC_ADVANCE_RIP_AND_FINISH();
10068 IEM_MC_END();
10069}
10070
10071
10072/**
10073 * @opcode 0xd7
10074 */
10075FNIEMOP_DEF(iemOp_xlat)
10076{
10077 IEMOP_MNEMONIC(xlat, "xlat");
10078 switch (pVCpu->iem.s.enmEffAddrMode)
10079 {
10080 case IEMMODE_16BIT:
10081 IEM_MC_BEGIN(2, 0, 0, 0);
10082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10083 IEM_MC_LOCAL(uint8_t, u8Tmp);
10084 IEM_MC_LOCAL(uint16_t, u16Addr);
10085 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
10086 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
10087 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
10088 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
10089 IEM_MC_ADVANCE_RIP_AND_FINISH();
10090 IEM_MC_END();
10091 break;
10092
10093 case IEMMODE_32BIT:
10094 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
10095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10096 IEM_MC_LOCAL(uint8_t, u8Tmp);
10097 IEM_MC_LOCAL(uint32_t, u32Addr);
10098 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
10099 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
10100 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
10101 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
10102 IEM_MC_ADVANCE_RIP_AND_FINISH();
10103 IEM_MC_END();
10104 break;
10105
10106 case IEMMODE_64BIT:
10107 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
10108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10109 IEM_MC_LOCAL(uint8_t, u8Tmp);
10110 IEM_MC_LOCAL(uint64_t, u64Addr);
10111 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
10112 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
10113 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
10114 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
10115 IEM_MC_ADVANCE_RIP_AND_FINISH();
10116 IEM_MC_END();
10117 break;
10118
10119 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10120 }
10121}
10122
10123
10124/**
10125 * Common worker for FPU instructions working on ST0 and STn, and storing the
10126 * result in ST0.
10127 *
10128 * @param bRm Mod R/M byte.
10129 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10130 */
10131FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
10132{
10133 IEM_MC_BEGIN(3, 1, 0, 0);
10134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10135 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10136 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10137 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10138 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10139
10140 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10141 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10142 IEM_MC_PREPARE_FPU_USAGE();
10143 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
10144 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
10145 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10146 } IEM_MC_ELSE() {
10147 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10148 } IEM_MC_ENDIF();
10149 IEM_MC_ADVANCE_RIP_AND_FINISH();
10150
10151 IEM_MC_END();
10152}
10153
10154
10155/**
10156 * Common worker for FPU instructions working on ST0 and STn, and only affecting
10157 * flags.
10158 *
10159 * @param bRm Mod R/M byte.
10160 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10161 */
10162FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
10163{
10164 IEM_MC_BEGIN(3, 1, 0, 0);
10165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10166 IEM_MC_LOCAL(uint16_t, u16Fsw);
10167 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10168 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10169 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10170
10171 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10172 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10173 IEM_MC_PREPARE_FPU_USAGE();
10174 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
10175 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
10176 IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
10177 } IEM_MC_ELSE() {
10178 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
10179 } IEM_MC_ENDIF();
10180 IEM_MC_ADVANCE_RIP_AND_FINISH();
10181
10182 IEM_MC_END();
10183}
10184
10185
10186/**
10187 * Common worker for FPU instructions working on ST0 and STn, only affecting
10188 * flags, and popping when done.
10189 *
10190 * @param bRm Mod R/M byte.
10191 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10192 */
10193FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
10194{
10195 IEM_MC_BEGIN(3, 1, 0, 0);
10196 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10197 IEM_MC_LOCAL(uint16_t, u16Fsw);
10198 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10199 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10200 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10201
10202 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10203 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10204 IEM_MC_PREPARE_FPU_USAGE();
10205 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
10206 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
10207 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
10208 } IEM_MC_ELSE() {
10209 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
10210 } IEM_MC_ENDIF();
10211 IEM_MC_ADVANCE_RIP_AND_FINISH();
10212
10213 IEM_MC_END();
10214}
10215
10216
/** Opcode 0xd8 11/0. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    /* FADD ST0,STn - result stored in ST0 via the common worker. */
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
10223
10224
/** Opcode 0xd8 11/1. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    /* FMUL ST0,STn - result stored in ST0 via the common worker. */
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
10231
10232
/** Opcode 0xd8 11/2. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    /* FCOM ST0,STn - flags-only comparison, no store, no pop. */
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
10239
10240
/** Opcode 0xd8 11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    /* FCOMP ST0,STn - same comparison worker as FCOM, but pops ST0 after. */
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
10247
10248
/** Opcode 0xd8 11/4. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    /* FSUB ST0,STn - result stored in ST0 via the common worker. */
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
10255
10256
/** Opcode 0xd8 11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    /* FSUBR ST0,STn - reversed-operand subtract, result in ST0. */
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
10263
10264
/** Opcode 0xd8 11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    /* FDIV ST0,STn - result stored in ST0 via the common worker. */
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
10271
10272
/** Opcode 0xd8 11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    /* FDIVR ST0,STn - reversed-operand divide, result in ST0. */
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
10279
10280
10281/**
10282 * Common worker for FPU instructions working on ST0 and an m32r, and storing
10283 * the result in ST0.
10284 *
10285 * @param bRm Mod R/M byte.
10286 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10287 */
10288FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
10289{
10290 IEM_MC_BEGIN(3, 3, 0, 0);
10291 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10292 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10293 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
10294 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10295 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10296 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
10297
10298 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10300
10301 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10302 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10303 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10304
10305 IEM_MC_PREPARE_FPU_USAGE();
10306 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10307 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
10308 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10309 } IEM_MC_ELSE() {
10310 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10311 } IEM_MC_ENDIF();
10312 IEM_MC_ADVANCE_RIP_AND_FINISH();
10313
10314 IEM_MC_END();
10315}
10316
10317
/** Opcode 0xd8 !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    /* FADD ST0,m32real - result stored in ST0 via the common m32r worker. */
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
10324
10325
/** Opcode 0xd8 !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    /* FMUL ST0,m32real - result stored in ST0 via the common m32r worker. */
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
10332
10333
/** Opcode 0xd8 !11/2. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    /* FCOM ST0,m32real - flags-only comparison against a 32-bit real memory
       operand; no store, no pop.  Open-coded (not using the common worker)
       since the FSW update records the memory operand. */
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10365
10366
/** Opcode 0xd8 !11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    /* FCOMP ST0,m32real - same as FCOM m32r above but pops ST0 afterwards
       (the *_THEN_POP FSW/underflow variants). */
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10398
10399
/** Opcode 0xd8 !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    /* FSUB ST0,m32real - result stored in ST0 via the common m32r worker. */
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
10406
10407
/** Opcode 0xd8 !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    /* FSUBR ST0,m32real - reversed-operand subtract, result in ST0. */
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
10414
10415
/** Opcode 0xd8 !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    /* FDIV ST0,m32real - result stored in ST0 via the common m32r worker. */
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
10422
10423
/** Opcode 0xd8 !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    /* FDIVR ST0,m32real - reversed-operand divide, result in ST0. */
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
10430
10431
10432/**
10433 * @opcode 0xd8
10434 */
10435FNIEMOP_DEF(iemOp_EscF0)
10436{
10437 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10438 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
10439
10440 if (IEM_IS_MODRM_REG_MODE(bRm))
10441 {
10442 switch (IEM_GET_MODRM_REG_8(bRm))
10443 {
10444 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
10445 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
10446 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
10447 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
10448 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
10449 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
10450 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
10451 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
10452 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10453 }
10454 }
10455 else
10456 {
10457 switch (IEM_GET_MODRM_REG_8(bRm))
10458 {
10459 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
10460 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
10461 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
10462 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
10463 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
10464 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
10465 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
10466 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
10467 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10468 }
10469 }
10470}
10471
10472
/** Opcode 0xd9 /0 mem32real
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    /* FLD m32real - fetch a 32-bit real from memory, convert to R80 and push
       it onto the FPU stack.  The push requires ST7 (the register that will
       become the new top) to be empty; otherwise a stack push overflow is
       recorded against the memory operand. */
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10503
10504
/** Opcode 0xd9 !11/2 mem32real
 * FST m32r - store st0 to memory as a 32-bit real (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* st0 empty: if the invalid-operation exception is masked, write the
           default negative QNaN; otherwise roll the mapping back untouched. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10542
10543
/** Opcode 0xd9 !11/3
 * FSTP m32r - store st0 to memory as a 32-bit real, then pop the stack. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* st0 empty: masked IM writes the default negative QNaN, unmasked
           leaves memory untouched; either way report stack underflow + pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10581
10582
/** Opcode 0xd9 !11/4
 * FLDENV - load the FPU environment (14 or 28 bytes depending on operand
 * size) from memory; the heavy lifting is done by iemCImpl_fldenv. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw),
                        iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
10601
10602
10603/** Opcode 0xd9 !11/5 */
10604FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
10605{
10606 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
10607 IEM_MC_BEGIN(1, 1, 0, 0);
10608 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10609 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10610
10611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10612 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10613 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10614
10615 IEM_MC_ARG(uint16_t, u16Fsw, 0);
10616 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10617
10618 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw),
10619 iemCImpl_fldcw, u16Fsw);
10620 IEM_MC_END();
10621}
10622
10623
/** Opcode 0xd9 !11/6
 * FNSTENV - store the FPU environment (14 or 28 bytes depending on operand
 * size) to memory; the store itself is done by iemCImpl_fnstenv. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
10642
10643
/** Opcode 0xd9 !11/7
 * FNSTCW - store the FPU control word to a 16-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
10660
10661
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP - FPU no-operation; still updates FOP/FPUIP and can raise \#NM/\#MF. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
10677
10678
/** Opcode 0xd9 11/0 stN
 * FLD st(i) - push a copy of st(i) onto the FPU stack (underflow if empty). */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10703
10704
/** Opcode 0xd9 11/3 stN
 * FXCH st(i) - exchange st0 and st(i); the underflow case is punted to
 * iemCImpl_fxch_underflow. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        /* Swap: st(i)'s old value becomes the st0 result (C1 set in FSW),
           st0's old value is written to st(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10733
10734
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP st(i) - copy st0 to st(i) and pop the stack. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        /* Destination is st0 itself: nothing to copy, just pop (or report
           underflow if st0 is empty). */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* General case: store st0 into st(i), then pop. */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10781
10782
10783/**
10784 * Common worker for FPU instructions working on ST0 and replaces it with the
10785 * result, i.e. unary operators.
10786 *
10787 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10788 */
10789FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
10790{
10791 IEM_MC_BEGIN(2, 1, 0, 0);
10792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10793 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10794 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10795 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
10796
10797 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10798 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10799 IEM_MC_PREPARE_FPU_USAGE();
10800 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
10801 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
10802 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10803 } IEM_MC_ELSE() {
10804 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10805 } IEM_MC_ENDIF();
10806 IEM_MC_ADVANCE_RIP_AND_FINISH();
10807
10808 IEM_MC_END();
10809}
10810
10811
/** Opcode 0xd9 0xe0. FCHS - change (negate) the sign of st0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
10818
10819
/** Opcode 0xd9 0xe1. FABS - absolute value of st0. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
10826
10827
/** Opcode 0xd9 0xe4.
 * FTST - compare st0 with 0.0, only updating the FSW condition flags. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10851
10852
/** Opcode 0xd9 0xe5.
 * FXAM - examine/classify st0.  Note: no empty-register check here - the
 * AIMPL is handed a plain register reference so it can classify empty too. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10873
10874
10875/**
10876 * Common worker for FPU instructions pushing a constant onto the FPU stack.
10877 *
10878 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10879 */
10880FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
10881{
10882 IEM_MC_BEGIN(1, 1, 0, 0);
10883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10884 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10885 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10886
10887 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10888 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10889 IEM_MC_PREPARE_FPU_USAGE();
10890 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
10891 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
10892 IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
10893 } IEM_MC_ELSE() {
10894 IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
10895 } IEM_MC_ENDIF();
10896 IEM_MC_ADVANCE_RIP_AND_FINISH();
10897
10898 IEM_MC_END();
10899}
10900
10901
/** Opcode 0xd9 0xe8. FLD1 - push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
10908
10909
/** Opcode 0xd9 0xe9. FLDL2T - push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
10916
10917
/** Opcode 0xd9 0xea. FLDL2E - push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
10924
/** Opcode 0xd9 0xeb. FLDPI - push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
10931
10932
/** Opcode 0xd9 0xec. FLDLG2 - push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
10939
/** Opcode 0xd9 0xed. FLDLN2 - push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
10946
10947
/** Opcode 0xd9 0xee. FLDZ - push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
10954
10955
/** Opcode 0xd9 0xf0.
 *
 * F2XM1 - compute 2^st0 - 1, replacing st0 with the result.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
10969
10970
10971/**
10972 * Common worker for FPU instructions working on STn and ST0, storing the result
10973 * in STn, and popping the stack unless IE, DE or ZE was raised.
10974 *
10975 * @param bRm Mod R/M byte.
10976 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10977 */
10978FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
10979{
10980 IEM_MC_BEGIN(3, 1, 0, 0);
10981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10982 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10983 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10984 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10985 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10986
10987 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10988 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10989
10990 IEM_MC_PREPARE_FPU_USAGE();
10991 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
10992 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
10993 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10994 } IEM_MC_ELSE() {
10995 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10996 } IEM_MC_ENDIF();
10997 IEM_MC_ADVANCE_RIP_AND_FINISH();
10998
10999 IEM_MC_END();
11000}
11001
11002
/** Opcode 0xd9 0xf1. FYL2X - st1 = st1 * log2(st0), then pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
11009
11010
11011/**
11012 * Common worker for FPU instructions working on ST0 and having two outputs, one
11013 * replacing ST0 and one pushed onto the stack.
11014 *
11015 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11016 */
11017FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
11018{
11019 IEM_MC_BEGIN(2, 1, 0, 0);
11020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11021 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
11022 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
11023 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
11024
11025 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11026 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11027 IEM_MC_PREPARE_FPU_USAGE();
11028 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
11029 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
11030 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
11031 } IEM_MC_ELSE() {
11032 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
11033 } IEM_MC_ENDIF();
11034 IEM_MC_ADVANCE_RIP_AND_FINISH();
11035
11036 IEM_MC_END();
11037}
11038
11039
/** Opcode 0xd9 0xf2. FPTAN - partial tangent of st0; two results (replace +
 *  push), via the common replace/push worker. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
11046
11047
/** Opcode 0xd9 0xf3. FPATAN - partial arctangent, result in st1, then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
11054
11055
/** Opcode 0xd9 0xf4. FXTRACT - split st0 into exponent and significand; two
 *  results (replace + push). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
11062
11063
/** Opcode 0xd9 0xf5. FPREM1 - IEEE partial remainder of st0 / st1. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
11070
11071
/** Opcode 0xd9 0xf6.
 * FDECSTP - decrement the FPU stack TOP pointer (no tags changed). */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11092
11093
/** Opcode 0xd9 0xf7.
 * FINCSTP - increment the FPU stack TOP pointer (no tags changed). */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11114
11115
/** Opcode 0xd9 0xf8. FPREM - partial remainder (truncating) of st0 / st1. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
11122
11123
/** Opcode 0xd9 0xf9. FYL2XP1 - st1 = st1 * log2(st0 + 1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
11130
11131
/** Opcode 0xd9 0xfa. FSQRT - square root of st0. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
11138
11139
/** Opcode 0xd9 0xfb. FSINCOS - sine and cosine of st0; two results
 *  (replace + push). */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
11146
11147
/** Opcode 0xd9 0xfc. FRNDINT - round st0 to integer (per FCW rounding mode). */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
11154
11155
/** Opcode 0xd9 0xfd. FSCALE - scale st0 by a power of two taken from st1. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
11162
11163
/** Opcode 0xd9 0xfe. FSIN - sine of st0. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
11170
11171
/** Opcode 0xd9 0xff. FCOS - cosine of st0. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
11178
11179
/** Used by iemOp_EscF1: dispatch table for the 0xd9 register-form encodings
 *  0xe0 thru 0xff (reg fields 4-7), indexed by (bRm - 0xe0). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
11216
11217
11218/**
11219 * @opcode 0xd9
11220 */
11221FNIEMOP_DEF(iemOp_EscF1)
11222{
11223 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11224 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
11225
11226 if (IEM_IS_MODRM_REG_MODE(bRm))
11227 {
11228 switch (IEM_GET_MODRM_REG_8(bRm))
11229 {
11230 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
11231 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
11232 case 2:
11233 if (bRm == 0xd0)
11234 return FNIEMOP_CALL(iemOp_fnop);
11235 IEMOP_RAISE_INVALID_OPCODE_RET();
11236 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
11237 case 4:
11238 case 5:
11239 case 6:
11240 case 7:
11241 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
11242 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
11243 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11244 }
11245 }
11246 else
11247 {
11248 switch (IEM_GET_MODRM_REG_8(bRm))
11249 {
11250 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
11251 case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
11252 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
11253 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
11254 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
11255 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
11256 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
11257 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
11258 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11259 }
11260 }
11261}
11262
11263
/** Opcode 0xda 11/0.
 * FCMOVB st0,st(i) - copy st(i) into st0 if CF is set (below). */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11288
11289
/** Opcode 0xda 11/1.
 * FCMOVE st0,st(i) - copy st(i) into st0 if ZF is set (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11314
11315
/** Opcode 0xda 11/2.
 * FCMOVBE st0,st(i) - copy st(i) into st0 if CF or ZF is set (below/equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11340
11341
/** Opcode 0xda 11/3.
 * FCMOVU st0,st(i) - copy st(i) into st0 if PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11366
11367
11368/**
11369 * Common worker for FPU instructions working on ST0 and ST1, only affecting
11370 * flags, and popping twice when done.
11371 *
11372 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11373 */
11374FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
11375{
11376 IEM_MC_BEGIN(3, 1, 0, 0);
11377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11378 IEM_MC_LOCAL(uint16_t, u16Fsw);
11379 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11380 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11381 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
11382
11383 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11384 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11385
11386 IEM_MC_PREPARE_FPU_USAGE();
11387 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
11388 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
11389 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
11390 } IEM_MC_ELSE() {
11391 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
11392 } IEM_MC_ENDIF();
11393 IEM_MC_ADVANCE_RIP_AND_FINISH();
11394
11395 IEM_MC_END();
11396}
11397
11398
/** Opcode 0xda 0xe9.
 * FUCOMPP - unordered compare st0 with st1, then pop both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
11405
11406
11407/**
11408 * Common worker for FPU instructions working on ST0 and an m32i, and storing
11409 * the result in ST0.
11410 *
11411 * @param bRm Mod R/M byte.
11412 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11413 */
11414FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
11415{
11416 IEM_MC_BEGIN(3, 3, 0, 0);
11417 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11418 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11419 IEM_MC_LOCAL(int32_t, i32Val2);
11420 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11421 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11422 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
11423
11424 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11426
11427 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11428 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11429 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11430
11431 IEM_MC_PREPARE_FPU_USAGE();
11432 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11433 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
11434 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
11435 } IEM_MC_ELSE() {
11436 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11437 } IEM_MC_ENDIF();
11438 IEM_MC_ADVANCE_RIP_AND_FINISH();
11439
11440 IEM_MC_END();
11441}
11442
11443
/** Opcode 0xda !11/0.
 * FIADD m32i: dispatches to the common ST0/m32i worker with the fiadd
 * assembly implementation; result is stored in ST0. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
11450
11451
/** Opcode 0xda !11/1.
 * FIMUL m32i: dispatches to the common ST0/m32i worker with the fimul
 * assembly implementation; result is stored in ST0. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
11458
11459
/** Opcode 0xda !11/2.
 * FICOM m32i: compares ST0 with a 32-bit integer memory operand, updating
 * only FSW (no value stored, no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* FSW update also records the memory operand (DS/FPU data pointer). */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX = no destination register for the underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11491
11492
/** Opcode 0xda !11/3.
 * FICOMP m32i: like FICOM m32i (compare ST0 with m32i, FSW only) but pops
 * the FPU stack once afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        /* Same assembly worker as FICOM; only the pop differs. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11524
11525
/** Opcode 0xda !11/4.
 * FISUB m32i: dispatches to the common ST0/m32i worker with the fisub
 * assembly implementation; result is stored in ST0. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
11532
11533
/** Opcode 0xda !11/5.
 * FISUBR m32i: dispatches to the common ST0/m32i worker with the fisubr
 * (reversed subtract) assembly implementation; result is stored in ST0. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
11540
11541
/** Opcode 0xda !11/6.
 * FIDIV m32i: dispatches to the common ST0/m32i worker with the fidiv
 * assembly implementation; result is stored in ST0. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
11548
11549
/** Opcode 0xda !11/7.
 * FIDIVR m32i: dispatches to the common ST0/m32i worker with the fidivr
 * (reversed divide) assembly implementation; result is stored in ST0. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
11556
11557
/**
 * @opcode 0xda
 *
 * FPU escape 0xda dispatcher: register forms are FCMOVcc and FUCOMPP,
 * memory forms operate on a 32-bit integer operand (FIADD..FIDIVR).
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the x87 opcode word (low 3 bits of the escape byte + ModRM). */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5:
                /* Only 0xe9 (FUCOMPP) is valid in this group. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11599
11600
/** Opcode 0xdb !11/0.
 * FILD m32i: loads a 32-bit integer from memory, converts it to R80 and
 * pushes it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Relative register 7 is the slot the push will occupy; if it is in use
       we have a stack overflow instead of a load. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11631
11632
/** Opcode 0xdb !11/1.
 * FISTTP m32i (SSE3): stores ST0 to memory as a 32-bit integer using
 * truncation, then pops the FPU stack. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination write-only up front; committed or rolled back below. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        /* fistt = truncating (round-toward-zero) variant of fist. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: with IM masked, write the integer indefinite value;
           otherwise roll the mapping back and raise via the underflow path. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11670
11671
/** Opcode 0xdb !11/2.
 * FIST m32i: stores ST0 to memory as a 32-bit integer (rounded per FCW.RC);
 * does not pop the stack. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        /* No pop here - that is the only difference from FISTP below. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11709
11710
/** Opcode 0xdb !11/3.
 * FISTP m32i: stores ST0 to memory as a 32-bit integer (rounded per FCW.RC),
 * then pops the FPU stack. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    /* NOTE(review): local count here is (3, 2) while the otherwise identical
       iemOp_fist_m32i uses (3, 3) - confirm whether the counts are still
       checked by the MC tooling or merely informational. */
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11748
11749
/** Opcode 0xdb !11/5.
 * FLD m80r: loads an 80-bit real from memory and pushes it onto the FPU
 * stack (no format conversion needed, but still goes via the aimpl). */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Relative register 7 is the slot the push will occupy. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11780
11781
/** Opcode 0xdb !11/7.
 * FSTP m80r: stores ST0 to memory as an 80-bit real, then pops the stack. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the 80-bit destination write-only; commit or rollback below. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_MEM_MAP_R80_WO(pr80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: with IM masked write negative QNaN, else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11819
11820
/** Opcode 0xdb 11/0.
 * FCMOVNB: copies ST(i) to ST0 when EFLAGS.CF is clear; both registers must
 * be non-empty, otherwise stack underflow is recorded. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11845
11846
/** Opcode 0xdb 11/1.
 * FCMOVNE: copies ST(i) to ST0 when EFLAGS.ZF is clear; both registers must
 * be non-empty, otherwise stack underflow is recorded. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11871
11872
/** Opcode 0xdb 11/2.
 * FCMOVNBE: copies ST(i) to ST0 when both EFLAGS.CF and EFLAGS.ZF are clear;
 * both registers must be non-empty, otherwise stack underflow is recorded. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11897
11898
/** Opcode 0xdb 11/3.
 * FCMOVNU: copies ST(i) to ST0 when EFLAGS.PF is clear (not unordered).
 * NOTE(review): the function/mnemonic is spelled "fcmovnnu" here while the
 * instruction is commonly spelled FCMOVNU - presumably a legacy naming
 * quirk; verify before renaming since the string is only diagnostic. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11923
11924
/** Opcode 0xdb 0xe0.
 * FNENI: 8087 interrupt-enable instruction; a no-op (ignored) on later CPUs,
 * so only decoding and the device-not-available check are performed. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11935
11936
/** Opcode 0xdb 0xe1.
 * FNDISI: 8087 interrupt-disable instruction; a no-op (ignored) on later
 * CPUs, so only decoding and the device-not-available check are performed. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11947
11948
/** Opcode 0xdb 0xe2.
 * FNCLEX: clears the FPU exception bits in FSW without checking for pending
 * exceptions first (the no-wait form). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    /* State is modified, so actualize for change (not just for read). */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11961
11962
/** Opcode 0xdb 0xe3.
 * FNINIT: reinitializes the FPU; deferred to the C implementation with
 * fCheckXcpts=false (the no-wait form does not check pending exceptions). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                                iemCImpl_finit, false /*fCheckXcpts*/);
}
11971
11972
/** Opcode 0xdb 0xe4.
 * FNSETPM: 80287 set-protected-mode instruction; ignored (no-op) on later
 * CPUs, so only decoding and the device-not-available check are performed. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11983
11984
/** Opcode 0xdb 0xe5.
 * FRSTPM: 80287XL return-to-real-mode instruction; raises \#UD on the CPUs
 * we emulate (the ignore-as-no-op variant is kept but disabled below). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
12000
12001
/** Opcode 0xdb 11/5.
 * FUCOMI: unordered compare of ST0 with ST(i) setting EFLAGS; deferred to
 * the shared fcomi/fucomi C implementation with fUCmp=true, no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
12010
12011
/** Opcode 0xdb 11/6.
 * FCOMI: ordered compare of ST0 with ST(i) setting EFLAGS; deferred to the
 * shared fcomi/fucomi C implementation with fUCmp=false, no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
12020
12021
/**
 * @opcode 0xdb
 *
 * FPU escape 0xdb dispatcher: register forms are FCMOVNcc, the 0xe0-0xe5
 * control group (fneni..frstpm) and FUCOMI/FCOMI; memory forms are 32-bit
 * integer load/store plus 80-bit real load/store.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the x87 opcode word (low 3 bits of the escape byte + ModRM). */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* /4 selects the miscellaneous control instructions by the
                   full ModRM byte. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
                    case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12073
12074
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Note the operand order: ST(i) is the first/destination operand here. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12105
12106
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST0: dispatches to the common STn/ST0 worker with the fadd
 * assembly implementation; result is stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
12113
12114
/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST0: dispatches to the common STn/ST0 worker with the fmul
 * assembly implementation; result is stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
12121
12122
/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST0: dispatches to the common STn/ST0 worker with the fsubr
 * (reversed subtract) assembly implementation; result is stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
12129
12130
/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST0: dispatches to the common STn/ST0 worker with the fsub
 * assembly implementation; result is stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
12137
12138
/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST0: dispatches to the common STn/ST0 worker with the fdivr
 * (reversed divide) assembly implementation; result is stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
12145
12146
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST0: dispatches to the common STn/ST0 worker with the fdiv
 * assembly implementation; result is stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
12153
12154
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnImpl     Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Fetch the 64-bit real operand before touching the FPU state. */
    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        /* Store in ST0 and record the memory operand (FPU data pointer). */
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12189
12190
/** Opcode 0xdc !11/0.
 * FADD m64r: dispatches to the common ST0/m64r worker with the fadd
 * assembly implementation; result is stored in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
12197
12198
/** Opcode 0xdc !11/1.
 * FMUL m64r: dispatches to the common ST0/m64r worker with the fmul
 * assembly implementation; result is stored in ST0. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
12205
12206
/** Opcode 0xdc !11/2.
 * FCOM m64r: compares ST0 with a 64-bit real memory operand, updating only
 * FSW (no value stored, no pop). */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX = no destination register for the underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12238
12239
/** Opcode 0xdc !11/3.
 * FCOMP m64r: like FCOM m64r (compare ST0 with m64r, FSW only) but pops the
 * FPU stack once afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        /* Same assembly worker as FCOM; only the pop differs. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12271
12272
/** Opcode 0xdc !11/4.
 * FSUB m64r: dispatches to the common ST0/m64r worker with the fsub
 * assembly implementation; result is stored in ST0. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
12279
12280
/** Opcode 0xdc !11/5.
 * FSUBR m64r: dispatches to the common ST0/m64r worker with the fsubr
 * (reversed subtract) assembly implementation; result is stored in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
12287
12288
/** Opcode 0xdc !11/6.
 * FDIV m64r: dispatches to the common ST0/m64r worker with the fdiv
 * assembly implementation; result is stored in ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
12295
12296
/** Opcode 0xdc !11/7.
 * FDIVR m64r: dispatches to the common ST0/m64r worker with the fdivr
 * (reversed divide) assembly implementation; result is stored in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
12303
12304
/**
 * @opcode 0xdc
 *
 * FPU escape 0xdc dispatcher: register forms are the ST(i),ST0 arithmetic
 * instructions, memory forms operate on a 64-bit real operand.
 */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the x87 opcode word (low 3 bits of the escape byte + ModRM). */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12343
12344
/** Opcode 0xdd !11/0.
 * Loads a 64-bit real from memory and pushes it onto the FPU stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires relative register 7 (the slot below the current top) to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12375
12376
/** Opcode 0xdd !11/1.
 * Store ST(0) to m64int with truncation (chop rounding), then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, write the integer indefinite and still commit. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12414
12415
/** Opcode 0xdd !11/2.
 * Store ST(0) to m64real without popping. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, store negative QNaN and still commit. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12453
12454
12455
12456
/** Opcode 0xdd !11/3.
 * Store ST(0) to m64real, then pop (same as fst m64r plus the pop). */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, store negative QNaN and still commit. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12494
12495
/** Opcode 0xdd !11/4.
 * Restore the full FPU state from a 94/108-byte memory image (deferred to C impl). */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    /* Image layout depends on the effective operand size, hence enmEffOpSize. */
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
12514
12515
/** Opcode 0xdd !11/6.
 * Save the full FPU state to a 94/108-byte memory image (deferred to C impl). */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
12534
/** Opcode 0xdd !11/7.
 * Store the FPU status word to a 16-bit memory operand (no pending-#MF check,
 * hence the "n" prefix). */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2, 0, 0);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
12558
12559
/** Opcode 0xdd 11/0.
 * Mark ST(i)'s tag as empty without touching the top-of-stack pointer. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
       unmodified. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12579
12580
/** Opcode 0xdd 11/2 (dispatched as case 2 in iemOp_EscF5).
 * Copy ST(0) to ST(i) without popping. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* No arithmetic involved, so the result FSW contribution is zero. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12603
12604
/** Opcode 0xdd 11/4 (dispatched as case 4 in iemOp_EscF5).
 * Unordered compare of ST(0) with ST(i); no store, no pop. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
12611
12612
/** Opcode 0xdd 11/5 (dispatched as case 5 in iemOp_EscF5).
 * Unordered compare of ST(0) with ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
12619
12620
/**
 * @opcode 0xdd
 *
 * Escape opcode 0xdd dispatcher: register forms work on ST(i) / the stack
 * tags, memory forms cover m64real loads/stores and FPU state save/restore.
 */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of 0xdd + modrm) for FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12659
12660
/** Opcode 0xde 11/0.
 * ST(i) = ST(i) + ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
12667
12668
/** Opcode 0xde 11/1.
 * ST(i) = ST(i) * ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
12675
12676
/** Opcode 0xde 0xd9.
 * Compare ST(0) with ST(1) and pop both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
12683
12684
/** Opcode 0xde 11/4.
 * Reversed subtract: ST(i) = ST(0) - ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
12691
12692
/** Opcode 0xde 11/5.
 * ST(i) = ST(i) - ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
12699
12700
/** Opcode 0xde 11/6.
 * Reversed divide: ST(i) = ST(0) / ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
12707
12708
/** Opcode 0xde 11/7.
 * ST(i) = ST(i) / ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
12715
12716
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * Decodes the effective address, fetches the signed 16-bit integer operand,
 * and invokes the assembly helper if ST(0) is not empty; otherwise raises
 * stack underflow.
 *
 * @param bRm Mod R/M byte.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        /* Second argument 0 = store the result back into ST(0). */
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12752
12753
/** Opcode 0xde !11/0.
 * ST(0) = ST(0) + (float)[m16int]. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
12760
12761
/** Opcode 0xde !11/1.
 * ST(0) = ST(0) * (float)[m16int]. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
12768
12769
/** Opcode 0xde !11/2.
 * Compare ST(0) against (float)[m16int]; only FSW is updated, no store. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12801
12802
/** Opcode 0xde !11/3.
 * Compare ST(0) against (float)[m16int], then pop (ficom + pop). */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12834
12835
/** Opcode 0xde !11/4.
 * ST(0) = ST(0) - (float)[m16int]. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
12842
12843
/** Opcode 0xde !11/5.
 * Reversed subtract: ST(0) = (float)[m16int] - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
12850
12851
/** Opcode 0xde !11/6.
 * ST(0) = ST(0) / (float)[m16int]. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
12858
12859
/** Opcode 0xde !11/7.
 * Reversed divide: ST(0) = (float)[m16int] / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
12866
12867
/**
 * @opcode 0xde
 *
 * Escape opcode 0xde dispatcher: register forms are the popping arithmetic
 * variants (plus FCOMPP at 0xd9), memory forms take a 16-bit integer operand.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of 0xde + modrm) for FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    /* Only /3 with rm==1 (0xd9) is defined; the rest are invalid. */
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12908
12909
/** Opcode 0xdf 11/0.
 * Undocument instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Free the tag for ST(i), then increment TOP (the fincstp part). */
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12929
12930
/** Opcode 0xdf 0xe0.
 * Store the FPU status word into AX (no pending-#MF check). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12945
12946
12947/** Opcode 0xdf 11/5. */
12948FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
12949{
12950 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
12951 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
12952 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
12953 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
12954}
12955
12956
/** Opcode 0xdf 11/6.
 * Ordered compare of ST(0) with ST(i) setting EFLAGS, then pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    /* Ordered compare, hence fUCmp=false; bit 31 of the last argument requests the pop. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
12965
12966
/** Opcode 0xdf !11/0.
 * Load a signed 16-bit integer from memory and push it as an 80-bit real. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires relative register 7 (the slot below the current top) to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12997
12998
/** Opcode 0xdf !11/1.
 * Store ST(0) to m16int with truncation (chop rounding), then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, write the integer indefinite and still commit. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13036
13037
/** Opcode 0xdf !11/2.
 * Store ST(0) to m16int (current rounding mode) without popping. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, write the integer indefinite and still commit. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13075
13076
/** Opcode 0xdf !11/3.
 * Store ST(0) to m16int (current rounding mode), then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, write the integer indefinite and still commit. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13114
13115
/** Opcode 0xdf !11/4.
 * Load an 80-bit packed BCD value from memory and push it as an 80-bit real. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires relative register 7 (the slot below the current top) to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13146
13147
/** Opcode 0xdf !11/5.
 * Load a signed 64-bit integer from memory and push it as an 80-bit real. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires relative register 7 (the slot below the current top) to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13178
13179
/** Opcode 0xdf !11/6.
 * Store ST(0) as an 80-bit packed BCD value, then pop. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_MEM_MAP_D80_WO(pd80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, write the BCD indefinite and still commit. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13217
13218
/** Opcode 0xdf !11/7.
 * FISTP m64int: convert ST(0) to a signed 64-bit integer, store it to memory
 * and pop the FPU stack.  If ST(0) is empty, the integer-indefinite value
 * (INT64_MIN) is stored when FCW.IM is set, otherwise the store is rolled
 * back; a stack underflow is signalled either way. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination write-only up front so memory faults are raised
       before the FPU state is touched. */
    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(int64_t *,               pi64Dst,        1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,      2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: masked IM -> store integer indefinite, else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13256
13257
13258/**
13259 * @opcode 0xdf
13260 */
13261FNIEMOP_DEF(iemOp_EscF7)
13262{
13263 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13264 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
13265 if (IEM_IS_MODRM_REG_MODE(bRm))
13266 {
13267 switch (IEM_GET_MODRM_REG_8(bRm))
13268 {
13269 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
13270 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
13271 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
13272 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
13273 case 4: if (bRm == 0xe0)
13274 return FNIEMOP_CALL(iemOp_fnstsw_ax);
13275 IEMOP_RAISE_INVALID_OPCODE_RET();
13276 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
13277 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
13278 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
13279 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13280 }
13281 }
13282 else
13283 {
13284 switch (IEM_GET_MODRM_REG_8(bRm))
13285 {
13286 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
13287 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
13288 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
13289 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
13290 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
13291 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
13292 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
13293 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
13294 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13295 }
13296 }
13297}
13298
13299
13300/**
13301 * @opcode 0xe0
13302 * @opfltest zf
13303 */
13304FNIEMOP_DEF(iemOp_loopne_Jb)
13305{
13306 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
13307 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13308 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13309
13310 switch (pVCpu->iem.s.enmEffAddrMode)
13311 {
13312 case IEMMODE_16BIT:
13313 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
13314 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13315 IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
13316 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13317 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13318 } IEM_MC_ELSE() {
13319 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13320 IEM_MC_ADVANCE_RIP_AND_FINISH();
13321 } IEM_MC_ENDIF();
13322 IEM_MC_END();
13323 break;
13324
13325 case IEMMODE_32BIT:
13326 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13328 IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
13329 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13330 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13331 } IEM_MC_ELSE() {
13332 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13333 IEM_MC_ADVANCE_RIP_AND_FINISH();
13334 } IEM_MC_ENDIF();
13335 IEM_MC_END();
13336 break;
13337
13338 case IEMMODE_64BIT:
13339 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
13340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13341 IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
13342 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13343 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13344 } IEM_MC_ELSE() {
13345 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13346 IEM_MC_ADVANCE_RIP_AND_FINISH();
13347 } IEM_MC_ENDIF();
13348 IEM_MC_END();
13349 break;
13350
13351 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13352 }
13353}
13354
13355
13356/**
13357 * @opcode 0xe1
13358 * @opfltest zf
13359 */
13360FNIEMOP_DEF(iemOp_loope_Jb)
13361{
13362 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
13363 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13364 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13365
13366 switch (pVCpu->iem.s.enmEffAddrMode)
13367 {
13368 case IEMMODE_16BIT:
13369 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
13370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13371 IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
13372 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13373 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13374 } IEM_MC_ELSE() {
13375 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13376 IEM_MC_ADVANCE_RIP_AND_FINISH();
13377 } IEM_MC_ENDIF();
13378 IEM_MC_END();
13379 break;
13380
13381 case IEMMODE_32BIT:
13382 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13384 IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
13385 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13386 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13387 } IEM_MC_ELSE() {
13388 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13389 IEM_MC_ADVANCE_RIP_AND_FINISH();
13390 } IEM_MC_ENDIF();
13391 IEM_MC_END();
13392 break;
13393
13394 case IEMMODE_64BIT:
13395 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
13396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13397 IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
13398 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13399 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13400 } IEM_MC_ELSE() {
13401 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13402 IEM_MC_ADVANCE_RIP_AND_FINISH();
13403 } IEM_MC_ENDIF();
13404 IEM_MC_END();
13405 break;
13406
13407 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13408 }
13409}
13410
13411
13412/**
13413 * @opcode 0xe2
13414 */
13415FNIEMOP_DEF(iemOp_loop_Jb)
13416{
13417 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
13418 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13419 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13420
13421 /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
13422 * using the 32-bit operand size override. How can that be restarted? See
13423 * weird pseudo code in intel manual. */
13424
13425 /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
13426 * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
13427 * the loop causes guest crashes, but when logging it's nice to skip a few million
13428 * lines of useless output. */
13429#if defined(LOG_ENABLED)
13430 if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
13431 switch (pVCpu->iem.s.enmEffAddrMode)
13432 {
13433 case IEMMODE_16BIT:
13434 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
13435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13436 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
13437 IEM_MC_ADVANCE_RIP_AND_FINISH();
13438 IEM_MC_END();
13439 break;
13440
13441 case IEMMODE_32BIT:
13442 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13444 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
13445 IEM_MC_ADVANCE_RIP_AND_FINISH();
13446 IEM_MC_END();
13447 break;
13448
13449 case IEMMODE_64BIT:
13450 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
13451 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13452 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
13453 IEM_MC_ADVANCE_RIP_AND_FINISH();
13454 IEM_MC_END();
13455 break;
13456
13457 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13458 }
13459#endif
13460
13461 switch (pVCpu->iem.s.enmEffAddrMode)
13462 {
13463 case IEMMODE_16BIT:
13464 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
13465 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13466 IEM_MC_IF_CX_IS_NOT_ONE() {
13467 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13468 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13469 } IEM_MC_ELSE() {
13470 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
13471 IEM_MC_ADVANCE_RIP_AND_FINISH();
13472 } IEM_MC_ENDIF();
13473 IEM_MC_END();
13474 break;
13475
13476 case IEMMODE_32BIT:
13477 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13479 IEM_MC_IF_ECX_IS_NOT_ONE() {
13480 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13481 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13482 } IEM_MC_ELSE() {
13483 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
13484 IEM_MC_ADVANCE_RIP_AND_FINISH();
13485 } IEM_MC_ENDIF();
13486 IEM_MC_END();
13487 break;
13488
13489 case IEMMODE_64BIT:
13490 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
13491 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13492 IEM_MC_IF_RCX_IS_NOT_ONE() {
13493 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13494 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13495 } IEM_MC_ELSE() {
13496 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
13497 IEM_MC_ADVANCE_RIP_AND_FINISH();
13498 } IEM_MC_ENDIF();
13499 IEM_MC_END();
13500 break;
13501
13502 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13503 }
13504}
13505
13506
13507/**
13508 * @opcode 0xe3
13509 */
13510FNIEMOP_DEF(iemOp_jecxz_Jb)
13511{
13512 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
13513 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13514 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13515
13516 switch (pVCpu->iem.s.enmEffAddrMode)
13517 {
13518 case IEMMODE_16BIT:
13519 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
13520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13521 IEM_MC_IF_CX_IS_NZ() {
13522 IEM_MC_ADVANCE_RIP_AND_FINISH();
13523 } IEM_MC_ELSE() {
13524 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13525 } IEM_MC_ENDIF();
13526 IEM_MC_END();
13527 break;
13528
13529 case IEMMODE_32BIT:
13530 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13532 IEM_MC_IF_ECX_IS_NZ() {
13533 IEM_MC_ADVANCE_RIP_AND_FINISH();
13534 } IEM_MC_ELSE() {
13535 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13536 } IEM_MC_ENDIF();
13537 IEM_MC_END();
13538 break;
13539
13540 case IEMMODE_64BIT:
13541 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
13542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13543 IEM_MC_IF_RCX_IS_NZ() {
13544 IEM_MC_ADVANCE_RIP_AND_FINISH();
13545 } IEM_MC_ELSE() {
13546 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13547 } IEM_MC_ENDIF();
13548 IEM_MC_END();
13549 break;
13550
13551 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13552 }
13553}
13554
13555
13556/**
13557 * @opcode 0xe4
13558 * @opfltest iopl
13559 */
13560FNIEMOP_DEF(iemOp_in_AL_Ib)
13561{
13562 IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
13563 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13565 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
13566 iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
13567}
13568
13569
13570/**
13571 * @opcode 0xe5
13572 * @opfltest iopl
13573 */
13574FNIEMOP_DEF(iemOp_in_eAX_Ib)
13575{
13576 IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
13577 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13579 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
13580 iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
13581 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
13582}
13583
13584
13585/**
13586 * @opcode 0xe6
13587 * @opfltest iopl
13588 */
13589FNIEMOP_DEF(iemOp_out_Ib_AL)
13590{
13591 IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
13592 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13594 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
13595 iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
13596}
13597
13598
13599/**
13600 * @opcode 0xe7
13601 * @opfltest iopl
13602 */
13603FNIEMOP_DEF(iemOp_out_Ib_eAX)
13604{
13605 IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
13606 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13608 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
13609 iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
13610 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
13611}
13612
13613
13614/**
13615 * @opcode 0xe8
13616 */
13617FNIEMOP_DEF(iemOp_call_Jv)
13618{
13619 IEMOP_MNEMONIC(call_Jv, "call Jv");
13620 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13621 switch (pVCpu->iem.s.enmEffOpSize)
13622 {
13623 case IEMMODE_16BIT:
13624 {
13625 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13626 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
13627 iemCImpl_call_rel_16, (int16_t)u16Imm);
13628 }
13629
13630 case IEMMODE_32BIT:
13631 {
13632 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13633 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
13634 iemCImpl_call_rel_32, (int32_t)u32Imm);
13635 }
13636
13637 case IEMMODE_64BIT:
13638 {
13639 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
13640 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
13641 iemCImpl_call_rel_64, u64Imm);
13642 }
13643
13644 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13645 }
13646}
13647
13648
13649/**
13650 * @opcode 0xe9
13651 */
13652FNIEMOP_DEF(iemOp_jmp_Jv)
13653{
13654 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
13655 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13656 switch (pVCpu->iem.s.enmEffOpSize)
13657 {
13658 case IEMMODE_16BIT:
13659 IEM_MC_BEGIN(0, 0, 0, 0);
13660 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
13661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13662 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
13663 IEM_MC_END();
13664 break;
13665
13666 case IEMMODE_64BIT:
13667 case IEMMODE_32BIT:
13668 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13669 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
13670 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13671 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
13672 IEM_MC_END();
13673 break;
13674
13675 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13676 }
13677}
13678
13679
13680/**
13681 * @opcode 0xea
13682 */
13683FNIEMOP_DEF(iemOp_jmp_Ap)
13684{
13685 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
13686 IEMOP_HLP_NO_64BIT();
13687
13688 /* Decode the far pointer address and pass it on to the far call C implementation. */
13689 uint32_t off32Seg;
13690 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
13691 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
13692 else
13693 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
13694 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
13695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13696 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
13697 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
13698 iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
13699 /** @todo make task-switches, ring-switches, ++ return non-zero status */
13700}
13701
13702
13703/**
13704 * @opcode 0xeb
13705 */
13706FNIEMOP_DEF(iemOp_jmp_Jb)
13707{
13708 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
13709 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13710 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13711
13712 IEM_MC_BEGIN(0, 0, 0, 0);
13713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13714 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13715 IEM_MC_END();
13716}
13717
13718
13719/**
13720 * @opcode 0xec
13721 * @opfltest iopl
13722 */
13723FNIEMOP_DEF(iemOp_in_AL_DX)
13724{
13725 IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
13726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13727 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
13728 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
13729 iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
13730}
13731
13732
13733/**
13734 * @opcode 0xed
13735 * @opfltest iopl
13736 */
13737FNIEMOP_DEF(iemOp_in_eAX_DX)
13738{
13739 IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
13740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13741 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
13742 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
13743 iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
13744 pVCpu->iem.s.enmEffAddrMode);
13745}
13746
13747
13748/**
13749 * @opcode 0xee
13750 * @opfltest iopl
13751 */
13752FNIEMOP_DEF(iemOp_out_DX_AL)
13753{
13754 IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
13755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13756 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
13757 iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
13758}
13759
13760
13761/**
13762 * @opcode 0xef
13763 * @opfltest iopl
13764 */
13765FNIEMOP_DEF(iemOp_out_DX_eAX)
13766{
13767 IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
13768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13769 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
13770 iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
13771 pVCpu->iem.s.enmEffAddrMode);
13772}
13773
13774
13775/**
13776 * @opcode 0xf0
13777 */
13778FNIEMOP_DEF(iemOp_lock)
13779{
13780 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
13781 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
13782
13783 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
13784 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
13785}
13786
13787
13788/**
13789 * @opcode 0xf1
13790 */
13791FNIEMOP_DEF(iemOp_int1)
13792{
13793 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
13794 /** @todo Does not generate \#UD on 286, or so they say... Was allegedly a
13795 * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
13796 * LOADALL memo. Needs some testing. */
13797 IEMOP_HLP_MIN_386();
13798 /** @todo testcase! */
13799 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
13800 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
13801 iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
13802}
13803
13804
13805/**
13806 * @opcode 0xf2
13807 */
13808FNIEMOP_DEF(iemOp_repne)
13809{
13810 /* This overrides any previous REPE prefix. */
13811 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
13812 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
13813 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
13814
13815 /* For the 4 entry opcode tables, REPNZ overrides any previous
13816 REPZ and operand size prefixes. */
13817 pVCpu->iem.s.idxPrefix = 3;
13818
13819 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
13820 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
13821}
13822
13823
13824/**
13825 * @opcode 0xf3
13826 */
13827FNIEMOP_DEF(iemOp_repe)
13828{
13829 /* This overrides any previous REPNE prefix. */
13830 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
13831 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
13832 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
13833
13834 /* For the 4 entry opcode tables, REPNZ overrides any previous
13835 REPNZ and operand size prefixes. */
13836 pVCpu->iem.s.idxPrefix = 2;
13837
13838 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
13839 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
13840}
13841
13842
13843/**
13844 * @opcode 0xf4
13845 */
13846FNIEMOP_DEF(iemOp_hlt)
13847{
13848 IEMOP_MNEMONIC(hlt, "hlt");
13849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13850 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_hlt);
13851}
13852
13853
13854/**
13855 * @opcode 0xf5
13856 * @opflmodify cf
13857 */
13858FNIEMOP_DEF(iemOp_cmc)
13859{
13860 IEMOP_MNEMONIC(cmc, "cmc");
13861 IEM_MC_BEGIN(0, 0, 0, 0);
13862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13863 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
13864 IEM_MC_ADVANCE_RIP_AND_FINISH();
13865 IEM_MC_END();
13866}
13867
13868
13869/**
13870 * Body for of 'inc/dec/not/neg Eb'.
13871 */
13872#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
13873 if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
13874 { \
13875 /* register access */ \
13876 IEM_MC_BEGIN(2, 0, 0, 0); \
13877 IEMOP_HLP_DONE_DECODING(); \
13878 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
13879 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13880 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
13881 IEM_MC_REF_EFLAGS(pEFlags); \
13882 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
13883 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13884 IEM_MC_END(); \
13885 } \
13886 else \
13887 { \
13888 /* memory access. */ \
13889 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
13890 { \
13891 IEM_MC_BEGIN(2, 2, 0, 0); \
13892 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
13893 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13894 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13895 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13896 \
13897 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
13898 IEMOP_HLP_DONE_DECODING(); \
13899 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13900 IEM_MC_FETCH_EFLAGS(EFlags); \
13901 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
13902 \
13903 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13904 IEM_MC_COMMIT_EFLAGS(EFlags); \
13905 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13906 IEM_MC_END(); \
13907 } \
13908 else \
13909 { \
13910 IEM_MC_BEGIN(2, 2, 0, 0); \
13911 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
13912 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13913 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13914 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13915 \
13916 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
13917 IEMOP_HLP_DONE_DECODING(); \
13918 IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13919 IEM_MC_FETCH_EFLAGS(EFlags); \
13920 IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
13921 \
13922 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
13923 IEM_MC_COMMIT_EFLAGS(EFlags); \
13924 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13925 IEM_MC_END(); \
13926 } \
13927 } \
13928 (void)0
13929
13930
13931/**
13932 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
13933 */
13934#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
13935 if (IEM_IS_MODRM_REG_MODE(bRm)) \
13936 { \
13937 /* \
13938 * Register target \
13939 */ \
13940 switch (pVCpu->iem.s.enmEffOpSize) \
13941 { \
13942 case IEMMODE_16BIT: \
13943 IEM_MC_BEGIN(2, 0, 0, 0); \
13944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13945 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
13946 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13947 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13948 IEM_MC_REF_EFLAGS(pEFlags); \
13949 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
13950 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13951 IEM_MC_END(); \
13952 break; \
13953 \
13954 case IEMMODE_32BIT: \
13955 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0); \
13956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13957 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
13958 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13959 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13960 IEM_MC_REF_EFLAGS(pEFlags); \
13961 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
13962 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
13963 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13964 IEM_MC_END(); \
13965 break; \
13966 \
13967 case IEMMODE_64BIT: \
13968 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0); \
13969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13970 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
13971 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13972 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13973 IEM_MC_REF_EFLAGS(pEFlags); \
13974 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
13975 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13976 IEM_MC_END(); \
13977 break; \
13978 \
13979 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
13980 } \
13981 } \
13982 else \
13983 { \
13984 /* \
13985 * Memory target. \
13986 */ \
13987 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
13988 { \
13989 switch (pVCpu->iem.s.enmEffOpSize) \
13990 { \
13991 case IEMMODE_16BIT: \
13992 IEM_MC_BEGIN(2, 3, 0, 0); \
13993 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
13994 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13995 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13996 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13997 \
13998 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13999 IEMOP_HLP_DONE_DECODING(); \
14000 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14001 IEM_MC_FETCH_EFLAGS(EFlags); \
14002 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
14003 \
14004 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
14005 IEM_MC_COMMIT_EFLAGS(EFlags); \
14006 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14007 IEM_MC_END(); \
14008 break; \
14009 \
14010 case IEMMODE_32BIT: \
14011 IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
14012 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
14013 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14014 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14015 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14016 \
14017 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14018 IEMOP_HLP_DONE_DECODING(); \
14019 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14020 IEM_MC_FETCH_EFLAGS(EFlags); \
14021 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
14022 \
14023 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
14024 IEM_MC_COMMIT_EFLAGS(EFlags); \
14025 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14026 IEM_MC_END(); \
14027 break; \
14028 \
14029 case IEMMODE_64BIT: \
14030 IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
14031 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
14032 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
14033 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14034 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
14035 \
14036 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14037 IEMOP_HLP_DONE_DECODING(); \
14038 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14039 IEM_MC_FETCH_EFLAGS(EFlags); \
14040 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
14041 \
14042 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
14043 IEM_MC_COMMIT_EFLAGS(EFlags); \
14044 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14045 IEM_MC_END(); \
14046 break; \
14047 \
14048 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
14049 } \
14050 } \
14051 else \
14052 { \
14053 (void)0
14054
/**
 * Locked-memory tail for IEMOP_BODY_UNARY_Ev.
 *
 * Completes the open 'else' branch left by IEMOP_BODY_UNARY_Ev: maps the
 * destination atomically and calls the locked worker for the effective
 * operand size.  Must follow IEMOP_BODY_UNARY_Ev immediately.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
14120
14121
14122/**
14123 * @opmaps grp3_f6
14124 * @opcode /0
14125 * @opflclass logical
14126 * @todo also /1
14127 */
14128FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
14129{
14130 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
14131 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
14132
14133 if (IEM_IS_MODRM_REG_MODE(bRm))
14134 {
14135 /* register access */
14136 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
14137 IEM_MC_BEGIN(3, 0, 0, 0);
14138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14139 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
14140 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
14141 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14142 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
14143 IEM_MC_REF_EFLAGS(pEFlags);
14144 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
14145 IEM_MC_ADVANCE_RIP_AND_FINISH();
14146 IEM_MC_END();
14147 }
14148 else
14149 {
14150 /* memory access. */
14151 IEM_MC_BEGIN(3, 3, 0, 0);
14152 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14153 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
14154
14155 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
14156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14157
14158 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
14159 IEM_MC_ARG(uint8_t const *, pu8Dst, 0);
14160 IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14161
14162 IEM_MC_ARG_CONST(uint8_t, u8Src, u8Imm, 1);
14163 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
14164 IEM_MC_FETCH_EFLAGS(EFlags);
14165 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
14166
14167 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
14168 IEM_MC_COMMIT_EFLAGS(EFlags);
14169 IEM_MC_ADVANCE_RIP_AND_FINISH();
14170 IEM_MC_END();
14171 }
14172}
14173
14174
/* Body for opcode 0xf6 variations /4, /5, /6 and /7 (mul/imul/div/idiv Eb).
   The selected worker updates AX (via pu16AX) from the 8-bit operand and
   returns zero on success or non-zero to raise \#DE (divide error). */
#define IEMOP_GRP3_MUL_DIV_EB(bRm, a_pfnU8Expr) \
    PFNIEMAIMPLMULDIVU8 const pfnU8 = (a_pfnU8Expr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register access */ \
        IEM_MC_BEGIN(3, 1, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint16_t *, pu16AX, 0); \
        IEM_MC_ARG(uint8_t, u8Value, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(int32_t, rc); \
        \
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags); \
        /* rc == 0 means success; anything else raises \#DE (div/idiv). */ \
        IEM_MC_IF_LOCAL_IS_Z(rc) { \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        } IEM_MC_ELSE() { \
            IEM_MC_RAISE_DIVIDE_ERROR(); \
        } IEM_MC_ENDIF(); \
        \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory access. */ \
        IEM_MC_BEGIN(3, 2, 0, 0); \
        IEM_MC_ARG(uint16_t *, pu16AX, 0); \
        IEM_MC_ARG(uint8_t, u8Value, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_LOCAL(int32_t, rc); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags); \
        IEM_MC_IF_LOCAL_IS_Z(rc) { \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        } IEM_MC_ELSE() { \
            IEM_MC_RAISE_DIVIDE_ERROR(); \
        } IEM_MC_ENDIF(); \
        \
        IEM_MC_END(); \
    } (void)0
14224
14225
/* Body for opcode 0xf7 variant /4, /5, /6 and /7 (mul/imul/div/idiv Ev).
   The worker table supplies one function per operand size; each takes the
   rAX and rDX register pair plus the operand and EFLAGS, returning zero on
   success or non-zero to raise \#DE (divide error). */
#define IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, a_pImplExpr) \
    PCIEMOPMULDIVSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register access */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(4, 1, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16AX, 0); \
                IEM_MC_ARG(uint16_t *, pu16DX, 1); \
                IEM_MC_ARG(uint16_t, u16Value, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                IEM_MC_LOCAL(int32_t, rc); \
                \
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(4, 1, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32AX, 0); \
                IEM_MC_ARG(uint32_t *, pu32DX, 1); \
                IEM_MC_ARG(uint32_t, u32Value, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                IEM_MC_LOCAL(int32_t, rc); \
                \
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    /* 32-bit writes via the worker go through a pointer, so \
                       the usual implicit zeroing of the high halves must be \
                       done explicitly - but only on the success path. */ \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(4, 1, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64AX, 0); \
                IEM_MC_ARG(uint64_t *, pu64DX, 1); \
                IEM_MC_ARG(uint64_t, u64Value, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                IEM_MC_LOCAL(int32_t, rc); \
                \
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
                \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory access. */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(4, 2, 0, 0); \
                IEM_MC_ARG(uint16_t *, pu16AX, 0); \
                IEM_MC_ARG(uint16_t *, pu16DX, 1); \
                IEM_MC_ARG(uint16_t, u16Value, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(int32_t, rc); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(4, 2, IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *, pu32AX, 0); \
                IEM_MC_ARG(uint32_t *, pu32DX, 1); \
                IEM_MC_ARG(uint32_t, u32Value, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(int32_t, rc); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    /* See the register variant: zero the high halves on success. */ \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(4, 2, IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *, pu64AX, 0); \
                IEM_MC_ARG(uint64_t *, pu64DX, 1); \
                IEM_MC_ARG(uint64_t, u64Value, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(int32_t, rc); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
                \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
14393
14394
14395/**
14396 * @opmaps grp3_f6
14397 * @opcode /2
14398 * @opflclass unchanged
14399 */
FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
{
/** @todo does not modify EFLAGS. */
    IEMOP_MNEMONIC(not_Eb, "not Eb");
    /* Common unary body handles register/memory forms and the LOCK variant. */
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
}
14406
14407
14408/**
14409 * @opmaps grp3_f6
14410 * @opcode /3
14411 * @opflclass arithmetic
14412 */
14413FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
14414{
14415 IEMOP_MNEMONIC(net_Eb, "neg Eb");
14416 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
14417}
14418
14419
14420/**
14421 * @opcode 0xf6
14422 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    /* Group 3 (byte form): dispatch on the ModR/M reg field. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
            /* /1 is dispatched to the same TEST handler as /0. */
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 2:
            return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
        case 4:
        {
            /**
             * @opdone
             * @opmaps grp3_f6
             * @opcode /4
             * @opflclass multiply
             */
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
            break;
        }
        case 5:
        {
            /**
             * @opdone
             * @opmaps grp3_f6
             * @opcode /5
             * @opflclass multiply
             */
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
            break;
        }
        case 6:
        {
            /**
             * @opdone
             * @opmaps grp3_f6
             * @opcode /6
             * @opflclass division
             */
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
            break;
        }
        case 7:
        {
            /**
             * @opdone
             * @opmaps grp3_f6
             * @opcode /7
             * @opflclass division
             */
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
            break;
        }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14491
14492
14493/**
14494 * @opmaps grp3_f7
14495 * @opcode /0
14496 * @opflclass logical
14497 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    /* AF is left undefined by TEST, so exclude it from verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                /* 64-bit immediate is a sign-extended 32-bit value (imm32). */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Two immediate bytes (iw) follow the ModR/M encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint16_t const *, pu16Dst, 0);
                /* TEST only reads the operand, hence the read-only mapping. */
                IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Four immediate bytes (id) follow the ModR/M encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint32_t const *, pu32Dst, 0);
                IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Still only four immediate bytes: imm32 sign-extended to 64 bits. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);

                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_ARG(uint64_t const *, pu64Dst, 0);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14632
14633
14634/**
14635 * @opmaps grp3_f7
14636 * @opcode /2
14637 * @opflclass unchanged
14638 */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
/** @todo does not modify EFLAGS */
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    /* The two bodies cover the plain and LOCK-prefixed encodings. */
    IEMOP_BODY_UNARY_Ev(       iemAImpl_not_u16,        iemAImpl_not_u32,        iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
14646
14647
14648/**
14649 * @opmaps grp3_f7
14650 * @opcode /3
14651 * @opflclass arithmetic
14652 */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    /* The two bodies cover the plain and LOCK-prefixed encodings. */
    IEMOP_BODY_UNARY_Ev(       iemAImpl_neg_u16,        iemAImpl_neg_u32,        iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
14659
14660
14661/**
14662 * @opmaps grp3_f7
14663 * @opcode /4
14664 * @opflclass multiply
14665 */
FNIEMOP_DEF_1(iemOp_grp3_mul_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(mul_Ev, "mul Ev");
    /* MUL leaves SF, ZF, AF and PF undefined; skip them during verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
}
14672
14673
14674/**
14675 * @opmaps grp3_f7
14676 * @opcode /5
14677 * @opflclass multiply
14678 */
FNIEMOP_DEF_1(iemOp_grp3_imul_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(imul_Ev, "imul Ev");
    /* IMUL leaves SF, ZF, AF and PF undefined; skip them during verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
}
14685
14686
14687/**
14688 * @opmaps grp3_f7
14689 * @opcode /6
14690 * @opflclass division
14691 */
FNIEMOP_DEF_1(iemOp_grp3_div_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(div_Ev, "div Ev");
    /* DIV leaves all six status flags undefined; skip them during verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
    IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
}
14698
14699
14700/**
14701 * @opmaps grp3_f7
14702 * @opcode /7
14703 * @opflclass division
14704 */
FNIEMOP_DEF_1(iemOp_grp3_idiv_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
    /* IDIV leaves all six status flags undefined; skip them during verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
    IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
}
14711
14712
14713/**
14714 * @opcode 0xf7
14715 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    /* Group 3 (word/dword/qword form): dispatch on the ModR/M reg field.
       Note that /1 is routed to the same TEST handler as /0. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_grp3_mul_Ev, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_grp3_imul_Ev, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_grp3_div_Ev, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_grp3_idiv_Ev, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14732
14733
14734/**
14735 * @opcode 0xf8
14736 * @opflmodify cf
14737 * @opflclear cf
14738 */
FNIEMOP_DEF(iemOp_clc)
{
    /* CLC: clear the carry flag; no other state is touched. */
    IEMOP_MNEMONIC(clc, "clc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
14748
14749
14750/**
14751 * @opcode 0xf9
14752 * @opflmodify cf
14753 * @opflset cf
14754 */
FNIEMOP_DEF(iemOp_stc)
{
    /* STC: set the carry flag; no other state is touched. */
    IEMOP_MNEMONIC(stc, "stc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
14764
14765
14766/**
14767 * @opcode 0xfa
14768 * @opfltest iopl,vm
14769 * @opflmodify if,vif
14770 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to a C implementation: privilege checks depend on IOPL/VME,
       it may trigger a VM-exit, and pending IRQs are checked beforehand. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, 0, iemCImpl_cli);
}
14777
14778
14779/**
14780 * @opcode 0xfb
14781 * @opfltest iopl,vm
14782 * @opflmodify if,vif
14783 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to a C implementation; STI additionally inhibits interrupts
       on the following instruction (shadow) and checks IRQs afterwards. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(  IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER
                                | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_INHIBIT_SHADOW, 0, iemCImpl_sti);
}
14791
14792
14793/**
14794 * @opcode 0xfc
14795 * @opflmodify df
14796 * @opflclear df
14797 */
FNIEMOP_DEF(iemOp_cld)
{
    /* CLD: clear the direction flag (string ops count upwards). */
    IEMOP_MNEMONIC(cld, "cld");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
14807
14808
14809/**
14810 * @opcode 0xfd
14811 * @opflmodify df
14812 * @opflset df
14813 */
FNIEMOP_DEF(iemOp_std)
{
    /* STD: set the direction flag (string ops count downwards). */
    IEMOP_MNEMONIC(std, "std");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
14823
14824
14825/**
14826 * @opmaps grp4
14827 * @opcode /0
14828 * @opflclass incdec
14829 */
FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Eb, "inc Eb");
    /* Common unary body handles register/memory forms and the LOCK variant. */
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
}
14835
14836
14837/**
14838 * @opmaps grp4
14839 * @opcode /1
14840 * @opflclass incdec
14841 */
FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Eb, "dec Eb");
    /* Common unary body handles register/memory forms and the LOCK variant. */
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
}
14847
14848
14849/**
14850 * @opcode 0xfe
14851 */
FNIEMOP_DEF(iemOp_Grp4)
{
    /* Group 4 (0xfe): only /0 (INC Eb) and /1 (DEC Eb) are defined;
       the remaining encodings raise \#UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
        default:
            /** @todo is the eff-addr decoded? */
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}
14865
14866/**
14867 * @opmaps grp5
14868 * @opcode /0
14869 * @opflclass incdec
14870 */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    /* The two bodies cover the plain and LOCK-prefixed encodings. */
    IEMOP_BODY_UNARY_Ev(       iemAImpl_inc_u16,        iemAImpl_inc_u32,        iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
14877
14878
14879/**
14880 * @opmaps grp5
14881 * @opcode /1
14882 * @opflclass incdec
14883 */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    /* The two bodies cover the plain and LOCK-prefixed encodings. */
    IEMOP_BODY_UNARY_Ev(       iemAImpl_dec_u16,        iemAImpl_dec_u32,        iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
14890
14891
14892/**
14893 * Opcode 0xff /2.
14894 * @param bRm The RM byte.
14895 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1, 0, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14978
/* Body for the far CALL/JMP forms of group 5 (0xff /3 and /5).  Loads a
   far pointer (offset at +0, 16-bit selector just after it) from memory
   and hands it to the given C implementation. */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl, a_fCImplExtra) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1, 0, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint16_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            /* m16:16 - offset first, then the selector at disp +2. */ \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint32_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            /* m16:32 - offset first, then the selector at disp +4. */ \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint64_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            /* m16:64 - offset first, then the selector at disp +8. */ \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE /* no gates */, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
15047
15048
15049/**
15050 * Opcode 0xff /3.
15051 * @param bRm The RM byte.
15052 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    /* Far call pushes a return frame, hence the extra BRANCH_STACK flag. */
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf, IEM_CIMPL_F_BRANCH_STACK);
}
15058
15059
15060/**
15061 * Opcode 0xff /4.
15062 * @param bRm The RM byte.
15063 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15146
15147
/**
 * Opcode 0xff /5 - jmpf Ep (far indirect jump).
 *
 * Loads a far pointer (offset + selector) from the memory operand and jumps
 * to it via the iemCImpl_FarJmp C implementation.  The shared
 * IEMOP_BODY_GRP5_FAR_EP macro supplies the whole decode/dispatch body;
 * presumably it also rejects the register form of the operand (Ep is
 * memory-only) — the macro itself is defined elsewhere in this file.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    /* Same far-Ep body as callf (/3), just with the far-jump C impl and no extra flags. */
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp, 0);
}
15157
15158
/**
 * Opcode 0xff /6 - push Ev.
 *
 * Pushes a 16/32/64-bit general register or memory operand onto the stack.
 * The register form is routed to the common push-GReg worker; the memory
 * form is decoded inline below: fetch the value first, then push it (so a
 * faulting fetch never modifies RSP).
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here. */
    /* In 64-bit mode PUSH defaults to a 64-bit operand; note the 32-bit case
       below is flagged IEM_MC_F_NOT_64BIT accordingly. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            /* Effective address is calculated before the decoding-done marker,
               matching the other Ev memory forms in this file. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15214
15215
15216/**
15217 * @opcode 0xff
15218 */
15219FNIEMOP_DEF(iemOp_Grp5)
15220{
15221 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15222 switch (IEM_GET_MODRM_REG_8(bRm))
15223 {
15224 case 0:
15225 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
15226 case 1:
15227 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
15228 case 2:
15229 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
15230 case 3:
15231 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
15232 case 4:
15233 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
15234 case 5:
15235 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
15236 case 6:
15237 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
15238 case 7:
15239 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
15240 IEMOP_RAISE_INVALID_OPCODE_RET();
15241 }
15242 AssertFailedReturn(VERR_IEM_IPE_3);
15243}
15244
15245
15246
/**
 * Function table for the one-byte (primary) opcode map, 0x00 thru 0xff.
 *
 * Not static because it is forward declared near the top of this file
 * (see the extern declaration) so decoding can start here from elsewhere.
 * One decoder entry per primary opcode; group opcodes (0x80-0x83, 0xc0/0xc1,
 * 0xd0-0xd3, 0xf6/0xf7, 0xfe, 0xff) dispatch further on ModR/M.reg.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex3, iemOp_lds_Gv_Mp__vex2, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_in_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
15314
15315
15316/** @} */
15317
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette