VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 103648

Last change on this file since 103648 was 103648, checked in by vboxsync, 14 months ago

VMM/IEM: Implemented iemNativeEmit_test_r_r_efl and enabled it for AMD64 hosts. bugref:10376

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 568.1 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 103648 2024-03-03 02:20:46Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
/** One-byte opcode dispatch table (256 entries, indexed by the opcode byte).
 *  Defined elsewhere; declared extern (not static) so it can be forward
 *  declared/referenced before its definition. */
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
/**
 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
 * memory/register as the destination (Eb,Gb encoding, read-modify-write).
 *
 * @param   a_fnNormalU8            C worker invoked for the non-native,
 *                                  non-locked paths (takes pu8Dst, u8Src, pEFlags).
 * @param   a_fnLockedU8            C worker invoked for the LOCK-prefixed memory
 *                                  path (atomic mapping).
 * @param   a_EmitterBasename       Basename pasted into
 *                                  iemNativeEmit_<basename>_r_r_efl for the
 *                                  native recompiler emitter.
 * @param   a_fRegRegNativeArchs    Host-arch mask enabling the native emitter
 *                                  for the register,register form.
 * @param   a_fMemRegNativeArchs    Not referenced by this body; the memory
 *                                  forms below always use the C workers.
 */
#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8, a_fnLockedU8, a_EmitterBasename, a_fRegRegNativeArchs, a_fMemRegNativeArchs) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t,         u8Src,      1); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        /* Native path: work on local copies and write the result back. */ \
        IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
            IEM_MC_LOCAL(uint8_t,   u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
            IEM_MC_LOCAL(uint32_t,  uEFlags); \
            IEM_MC_FETCH_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Dst); \
            IEM_MC_COMMIT_EFLAGS(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            /* Fallback: reference the register and EFLAGS, call the C worker. */ \
            IEM_MC_ARG(uint8_t *,   pu8Dst,     0); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_ARG(uint32_t *,  pEFlags,    2); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            /* No (honored) LOCK prefix: plain read-write mapping + normal worker. */ \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* LOCK prefixed: atomic mapping + the locked worker variant. */ \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bMapInfoDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bMapInfoDst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bMapInfoDst); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
145
/**
 * Body for instructions like TEST & CMP, ++ with a byte memory/registers as
 * operands (Eb,Gb encoding; the destination operand is only read, never
 * written, so a LOCK prefix is invalid).
 *
 * @param   a_fnNormalU8        C worker (takes pu8Dst, u8Src, pEFlags; only
 *                              EFLAGS are updated on this read-only path).
 * @param   a_EmitterBasename   Basename pasted into
 *                              iemNativeEmit_<basename>_r_r_efl.
 * @param   a_fNativeArchs      Host-arch mask enabling the native emitter for
 *                              the register,register form.
 */
#define IEMOP_BODY_BINARY_rm_r8_RO(a_fnNormalU8, a_EmitterBasename, a_fNativeArchs) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t,         u8Src,      1); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t,   u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
            IEM_MC_LOCAL(uint32_t,  uEFlags); \
            IEM_MC_FETCH_EFLAGS(uEFlags); \
            /* Read-only: only EFLAGS are committed, u8Dst is not stored back. */ \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_COMMIT_EFLAGS(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *,   pu8Dst,     0); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_ARG(uint32_t *,  pEFlags,    2); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            /* NOTE(review): native path hard-disabled here (literal 0) while the \
               word/dword/qword sibling IEMOP_BODY_BINARY_rm_rv_RO passes \
               a_fNativeArchs at this point -- confirm whether this is staged \
               on purpose before enabling. */ \
            IEM_MC_NATIVE_IF(0) { \
                IEM_MC_LOCAL(uint8_t,   u8Dst); \
                IEM_MC_FETCH_MEM_U8(u8Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t,   u8SrcEmit); \
                IEM_MC_FETCH_GREG_U8(u8SrcEmit, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                IEM_MC_LOCAL(uint32_t,  uEFlags); \
                IEM_MC_FETCH_EFLAGS(uEFlags); \
                IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u8Dst, u8SrcEmit, uEFlags, 8); \
                IEM_MC_COMMIT_EFLAGS(uEFlags); \
            } IEM_MC_NATIVE_ELSE() { \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint8_t const *, pu8Dst,          0); \
                IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_ARG(uint8_t,         u8Src,           1); \
                IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
            } IEM_MC_NATIVE_ENDIF(); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* LOCK on a read-only destination is invalid -> \#UD. */ \
            /** @todo we should probably decode the address first. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
226
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination (Gb,Eb encoding; memory can only be the source, so no LOCK
 * handling is needed here).
 *
 * @param   a_fnNormalU8        C worker (takes pu8Dst, u8Src, pEFlags).
 * @param   a_EmitterBasename   Basename pasted into
 *                              iemNativeEmit_<basename>_r_r_efl.
 * @param   a_fNativeArchs      Host-arch mask enabling the native emitter for
 *                              both the reg,reg and reg,mem forms.
 */
#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8, a_EmitterBasename, a_fNativeArchs) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t,         u8Src,      1); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t,   u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
            IEM_MC_LOCAL(uint32_t,  uEFlags); \
            IEM_MC_FETCH_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst); \
            IEM_MC_COMMIT_EFLAGS(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *,   pu8Dst,     0); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ARG(uint32_t *,  pEFlags,    2); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory: fetch the source byte, then same flow as above. \
         */ \
        IEM_MC_BEGIN(3, 1, 0, 0); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t,         u8Src,      1); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t,   u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
            IEM_MC_LOCAL(uint32_t,  uEFlags); \
            IEM_MC_FETCH_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst); \
            IEM_MC_COMMIT_EFLAGS(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *,   pu8Dst,     0); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ARG(uint32_t *,  pEFlags,    2); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
293
294
/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination (Ev,Gv encoding, read-modify-write).
 *
 * @note    This macro deliberately ends inside two still-open scopes (the
 *          memory 'else' and the LOCK 'else'); the invocation MUST be
 *          followed by IEMOP_BODY_BINARY_rm_rv_LOCKED, which closes them.
 *          The split works around a parsing issue in IEMAllInstPython.py.
 *
 * @param   a_fnNormalU16           C worker for the 16-bit non-locked paths.
 * @param   a_fnNormalU32           C worker for the 32-bit non-locked paths.
 * @param   a_fnNormalU64           C worker for the 64-bit non-locked paths.
 * @param   a_EmitterBasename       Basename pasted into
 *                                  iemNativeEmit_<basename>_r_r_efl.
 * @param   a_fRegRegNativeArchs    Host-arch mask enabling the native emitter
 *                                  for the register,register forms.
 * @param   a_fMemRegNativeArchs    Not referenced by this body; memory forms
 *                                  always use the C workers.
 */
#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_EmitterBasename, a_fRegRegNativeArchs, a_fMemRegNativeArchs) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t,        u16Src,     1); \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
                    IEM_MC_LOCAL(uint16_t,  u16Dst); \
                    IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                    IEM_MC_LOCAL(uint32_t,  uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u16Dst, u16Src, uEFlags, 16); \
                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Dst); \
                    IEM_MC_COMMIT_EFLAGS(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint16_t *,  pu16Dst,    0); \
                    IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG(uint32_t *,  pEFlags,    2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t,        u32Src,     1); \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
                    IEM_MC_LOCAL(uint32_t,  u32Dst); \
                    IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                    IEM_MC_LOCAL(uint32_t,  uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u32Dst, u32Src, uEFlags, 32); \
                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst); \
                    IEM_MC_COMMIT_EFLAGS(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint32_t *,  pu32Dst,    0); \
                    IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG(uint32_t *,  pEFlags,    2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    /* Explicit high-dword clearing needed on the by-reference path. */ \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t,        u64Src,     1); \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
                    IEM_MC_LOCAL(uint64_t,  u64Dst); \
                    IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                    IEM_MC_LOCAL(uint32_t,  uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u64Dst, u64Src, uEFlags, 64); \
                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst); \
                    IEM_MC_COMMIT_EFLAGS(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint64_t *,  pu64Dst,    0); \
                    IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG(uint32_t *,  pEFlags,    2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* Scopes intentionally left open; closed by IEMOP_BODY_BINARY_rm_rv_LOCKED. */ \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * Continuation of IEMOP_BODY_BINARY_rm_rv_RW: the LOCK-prefixed memory paths.
 * Must directly follow an IEMOP_BODY_BINARY_rm_rv_RW invocation, as it closes
 * the two scopes that macro leaves open.
 *
 * @param   a_fnLockedU16   C worker for the 16-bit atomic (locked) path.
 * @param   a_fnLockedU32   C worker for the 32-bit atomic (locked) path.
 * @param   a_fnLockedU64   C worker for the 64-bit atomic (locked) path.
 */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
540
/**
 * Body for read-only word/dword/qword instructions like TEST and CMP with
 * memory/register as the destination (Ev,Gv encoding; the destination is only
 * read, so a LOCK prefix raises \#UD).
 *
 * @param   a_fnNormalU16       C worker for the 16-bit paths.
 * @param   a_fnNormalU32       C worker for the 32-bit paths.
 * @param   a_fnNormalU64       C worker for the 64-bit paths.
 * @param   a_EmitterBasename   Basename pasted into
 *                              iemNativeEmit_<basename>_r_r_efl.
 * @param   a_fNativeArchs      Host-arch mask enabling the native emitter for
 *                              both the register and memory forms.
 */
#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_EmitterBasename, a_fNativeArchs) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t,        u16Src,     1); \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint16_t,  u16Dst); \
                    IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                    IEM_MC_LOCAL(uint32_t,  uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    /* Read-only: only EFLAGS are committed, no store-back. */ \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u16Dst, u16Src, uEFlags, 16); \
                    IEM_MC_COMMIT_EFLAGS(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint16_t *,  pu16Dst,    0); \
                    IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG(uint32_t *,  pEFlags,    2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t,        u32Src,     1); \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint32_t,  u32Dst); \
                    IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                    IEM_MC_LOCAL(uint32_t,  uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u32Dst, u32Src, uEFlags, 32); \
                    IEM_MC_COMMIT_EFLAGS(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint32_t *,  pu32Dst,    0); \
                    IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG(uint32_t *,  pEFlags,    2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t,        u64Src,     1); \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                    IEM_MC_LOCAL(uint64_t,  u64Dst); \
                    IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                    IEM_MC_LOCAL(uint32_t,  uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u64Dst, u64Src, uEFlags, 64); \
                    IEM_MC_COMMIT_EFLAGS(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint64_t *,  pu64Dst,    0); \
                    IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG(uint32_t *,  pEFlags,    2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    /* Native path: plain fetch (no mapping) since nothing is written back. */ \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint16_t,  u16Dst); \
                        IEM_MC_FETCH_MEM_U16(u16Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint16_t,  u16SrcEmit); \
                        IEM_MC_FETCH_GREG_U16(u16SrcEmit, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                        /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                        IEM_MC_LOCAL(uint32_t,  uEFlags); \
                        IEM_MC_FETCH_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u16Dst, u16SrcEmit, uEFlags, 16); \
                        IEM_MC_COMMIT_EFLAGS(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint16_t const *, pu16Dst,        0); \
                        IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG(uint16_t,         u16Src,         1); \
                        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                        IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                        IEM_MC_FETCH_EFLAGS(EFlags); \
                        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(EFlags); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint32_t,  u32Dst); \
                        IEM_MC_FETCH_MEM_U32(u32Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint32_t,  u32SrcEmit); \
                        IEM_MC_FETCH_GREG_U32(u32SrcEmit, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                        /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                        IEM_MC_LOCAL(uint32_t,  uEFlags); \
                        IEM_MC_FETCH_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u32Dst, u32SrcEmit, uEFlags, 32); \
                        IEM_MC_COMMIT_EFLAGS(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint32_t const *, pu32Dst,        0); \
                        IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG(uint32_t,         u32Src,         1); \
                        IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                        IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                        IEM_MC_FETCH_EFLAGS(EFlags); \
                        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(EFlags); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_NATIVE_IF(a_fNativeArchs) { \
                        IEM_MC_LOCAL(uint64_t,  u64Dst); \
                        IEM_MC_FETCH_MEM_U64(u64Dst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint64_t,  u64SrcEmit); \
                        IEM_MC_FETCH_GREG_U64(u64SrcEmit, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                        /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                        IEM_MC_LOCAL(uint32_t,  uEFlags); \
                        IEM_MC_FETCH_EFLAGS(uEFlags); \
                        IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u64Dst, u64SrcEmit, uEFlags, 64); \
                        IEM_MC_COMMIT_EFLAGS(uEFlags); \
                    } IEM_MC_NATIVE_ELSE() { \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_ARG(uint64_t const *, pu64Dst,        0); \
                        IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_ARG(uint64_t,         u64Src,         1); \
                        IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                        IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                        IEM_MC_FETCH_EFLAGS(EFlags); \
                        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                        IEM_MC_COMMIT_EFLAGS(EFlags); \
                    } IEM_MC_NATIVE_ENDIF(); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* LOCK on a read-only destination is invalid -> \#UD. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
744
745
/**
 * Body for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * @param   a_fnNormalU8    C worker (takes pu8Dst, u8Src, pEFlags).
 *
 * @note    Deliberately ends with IEM_MC_END() without a trailing semicolon;
 *          the invocation site supplies it.
 */
#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    \
    IEM_MC_BEGIN(3, 0, 0, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1); \
    IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
    \
    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
    \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
765
/**
 * Body for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate (sign-extended to 64-bit in
 * 64-bit mode, per the S32_SX_U64 fetch).
 *
 * @param   a_fnNormalU16       C worker for the 16-bit operand size.
 * @param   a_fnNormalU32       C worker for the 32-bit operand size.
 * @param   a_fnNormalU64       C worker for the 64-bit operand size.
 * @param   a_fModifiesDstReg   Whether the instruction writes the destination
 *                              register; governs the explicit high-dword
 *                              clearing of RAX in the 32-bit case (false for
 *                              read-only ops like TEST/CMP).
 *
 * @note    No 'break' after IEM_MC_END() in the cases below -- NOTE(review):
 *          presumably IEM_MC_ADVANCE_RIP_AND_FINISH() leaves the function, so
 *          fall-through cannot occur; other bodies in this file add an
 *          explicit break.
 */
#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            \
            IEM_MC_BEGIN(3, 0, 0, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
            \
            /* By-reference 32-bit write: clear RAX[63:32] explicitly, but only \
               if the instruction actually writes the destination. */ \
            if (a_fModifiesDstReg) \
                IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
832
833
834
835/* Instruction specification format - work in progress: */
836
837/**
838 * @opcode 0x00
839 * @opmnemonic add
840 * @op1 rm:Eb
841 * @op2 reg:Gb
842 * @opmaps one
843 * @openc ModR/M
844 * @opflclass arithmetic
845 * @ophints harmless ignores_op_sizes
846 * @opstats add_Eb_Gb
847 * @opgroup og_gen_arith_bin
848 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
849 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
850 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
851 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
852 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    /* 0x00: ADD r/m8, r8 - LOCK prefix allowed on the memory form (locked worker supplied). */
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_add_u8, iemAImpl_add_u8_locked, add, 0, 0);
}
858
859
860/**
861 * @opcode 0x01
862 * @opgroup og_gen_arith_bin
863 * @opflclass arithmetic
864 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
865 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
866 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
867 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
868 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    /* 0x01: ADD r/m16/32/64, r16/32/64 - separate unlocked and LOCK-prefixed worker sets. */
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64, add, 0, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
875
876
877/**
878 * @opcode 0x02
879 * @opgroup og_gen_arith_bin
880 * @opflclass arithmetic
881 * @opcopytests iemOp_add_Eb_Gb
882 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* 0x02: ADD r8, r/m8 - register destination, so no LOCK variant. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8, add, 0);
}
888
889
890/**
891 * @opcode 0x03
892 * @opgroup og_gen_arith_bin
893 * @opflclass arithmetic
894 * @opcopytests iemOp_add_Ev_Gv
895 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    /* 0x03: ADD r16/32/64, r/m16/32/64. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1, 0, add, 0);
}
902
903
904/**
905 * @opcode 0x04
906 * @opgroup og_gen_arith_bin
907 * @opflclass arithmetic
908 * @opcopytests iemOp_add_Eb_Gb
909 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* 0x04: ADD AL, imm8 - fixed 8-bit accumulator form, operand-size prefixes ignored. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
}
915
916
917/**
918 * @opcode 0x05
919 * @opgroup og_gen_arith_bin
920 * @opflclass arithmetic
921 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
922 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
923 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
924 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
925 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* 0x05: ADD rAX, Iz - last arg 1: destination register is modified. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
931
932
933/**
934 * @opcode 0x06
935 * @opgroup og_stack_sreg
936 */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* 0x06: PUSH ES - invalid in 64-bit mode; shares the common sreg-push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
943
944
945/**
946 * @opcode 0x07
947 * @opgroup og_stack_sreg
948 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* 0x07: POP ES - invalid in 64-bit mode.  Defers to the C implementation;
       the register mask lists what iemCImpl_pop_Sreg may modify: rSP plus the
       full ES state (selector, base, limit, attributes). */
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES),
                                iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
962
963
964/**
965 * @opcode 0x08
966 * @opgroup og_gen_arith_bin
967 * @opflclass logical
968 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
969 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
970 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
971 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
972 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    /* 0x08: OR r/m8, r8 - AF is declared undefined for verification (logical op);
       native-emitter variants exist for AMD64 and ARM64 hosts. */
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_or_u8, iemAImpl_or_u8_locked, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
979
980
/**
982 * @opcode 0x09
983 * @opgroup og_gen_arith_bin
984 * @opflclass logical
985 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
986 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
987 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
988 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
989 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
990 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
991 * @note AF is documented as undefined, but both modern AMD and Intel CPUs clears it.
992 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    /* 0x09: OR r/m16/32/64, r16/32/64 - separate unlocked and LOCK-prefixed worker sets. */
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
1000
1001
1002/**
1003 * @opcode 0x0a
1004 * @opgroup og_gen_arith_bin
1005 * @opflclass logical
1006 * @opcopytests iemOp_or_Eb_Gb
1007 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    /* 0x0a: OR r8, r/m8 - register destination. */
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1014
1015
1016/**
1017 * @opcode 0x0b
1018 * @opgroup og_gen_arith_bin
1019 * @opflclass logical
1020 * @opcopytests iemOp_or_Ev_Gv
1021 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    /* 0x0b: OR r16/32/64, r/m16/32/64. */
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1, 0, or, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1029
1030
1031/**
1032 * @opcode 0x0c
1033 * @opgroup og_gen_arith_bin
1034 * @opflclass logical
1035 * @opcopytests iemOp_or_Eb_Gb
1036 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    /* 0x0c: OR AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
}
1043
1044
1045/**
1046 * @opcode 0x0d
1047 * @opgroup og_gen_arith_bin
1048 * @opflclass logical
1049 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
1050 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
1051 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1052 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1053 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1054 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1055 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
1056 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* 0x0d: OR rAX, Iz - last arg 1: destination register is modified. */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
1063
1064
1065/**
1066 * @opcode 0x0e
1067 * @opgroup og_stack_sreg
1068 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* 0x0e: PUSH CS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
1075
1076
1077/**
1078 * @opcode 0x0f
1079 * @opmnemonic EscTwo0f
1080 * @openc two0f
1081 * @opdisenum OP_2B_ESC
1082 * @ophints harmless
1083 * @opgroup og_escapes
1084 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        /* The two-byte map has four entries per opcode, selected by the
           current prefix index (idxPrefix). */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_CS),
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
1122
1123/**
1124 * @opcode 0x10
1125 * @opgroup og_gen_arith_bin
1126 * @opflclass arithmetic_carry
1127 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1128 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1129 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1130 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1131 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1132 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* 0x10: ADC r/m8, r8 - add with carry; LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_adc_u8, iemAImpl_adc_u8_locked, adc, 0, 0);
}
1138
1139
1140/**
1141 * @opcode 0x11
1142 * @opgroup og_gen_arith_bin
1143 * @opflclass arithmetic_carry
1144 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1145 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1146 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1147 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1148 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1149 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* 0x11: ADC r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64, adc, 0, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
1156
1157
1158/**
1159 * @opcode 0x12
1160 * @opgroup og_gen_arith_bin
1161 * @opflclass arithmetic_carry
1162 * @opcopytests iemOp_adc_Eb_Gb
1163 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* 0x12: ADC r8, r/m8. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8, adc, 0);
}
1169
1170
1171/**
1172 * @opcode 0x13
1173 * @opgroup og_gen_arith_bin
1174 * @opflclass arithmetic_carry
1175 * @opcopytests iemOp_adc_Ev_Gv
1176 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* 0x13: ADC r16/32/64, r/m16/32/64. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1, 0, adc, 0);
}
1183
1184
1185/**
1186 * @opcode 0x14
1187 * @opgroup og_gen_arith_bin
1188 * @opflclass arithmetic_carry
1189 * @opcopytests iemOp_adc_Eb_Gb
1190 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* 0x14: ADC AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
1196
1197
1198/**
1199 * @opcode 0x15
1200 * @opgroup og_gen_arith_bin
1201 * @opflclass arithmetic_carry
1202 * @opcopytests iemOp_adc_Ev_Gv
1203 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* 0x15: ADC rAX, Iz - last arg 1: destination register is modified. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1209
1210
1211/**
1212 * @opcode 0x16
1213 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* 0x16: PUSH SS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1220
1221
1222/**
1223 * @opcode 0x17
1224 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* 0x17: POP SS - invalid in 64-bit mode.  IEM_CIMPL_F_INHIBIT_SHADOW
       requests the interrupt-shadow handling that follows a POP SS; the mask
       lists rSP plus the full SS state the C implementation may modify. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_INHIBIT_SHADOW,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_SS),
                                iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1238
1239
1240/**
1241 * @opcode 0x18
1242 * @opgroup og_gen_arith_bin
1243 * @opflclass arithmetic_carry
1244 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* 0x18: SBB r/m8, r8 - subtract with borrow; LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_sbb_u8, iemAImpl_sbb_u8_locked, sbb, 0, 0);
}
1250
1251
1252/**
1253 * @opcode 0x19
1254 * @opgroup og_gen_arith_bin
1255 * @opflclass arithmetic_carry
1256 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* 0x19: SBB r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64, sbb, 0, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
1263
1264
1265/**
1266 * @opcode 0x1a
1267 * @opgroup og_gen_arith_bin
1268 * @opflclass arithmetic_carry
1269 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* 0x1a: SBB r8, r/m8. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8, sbb, 0);
}
1275
1276
1277/**
1278 * @opcode 0x1b
1279 * @opgroup og_gen_arith_bin
1280 * @opflclass arithmetic_carry
1281 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* 0x1b: SBB r16/32/64, r/m16/32/64. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1, 0, sbb, 0);
}
1288
1289
1290/**
1291 * @opcode 0x1c
1292 * @opgroup og_gen_arith_bin
1293 * @opflclass arithmetic_carry
1294 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* 0x1c: SBB AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
1300
1301
1302/**
1303 * @opcode 0x1d
1304 * @opgroup og_gen_arith_bin
1305 * @opflclass arithmetic_carry
1306 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* 0x1d: SBB rAX, Iz - last arg 1: destination register is modified. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1312
1313
1314/**
1315 * @opcode 0x1e
1316 * @opgroup og_stack_sreg
1317 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* 0x1e: PUSH DS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1324
1325
1326/**
1327 * @opcode 0x1f
1328 * @opgroup og_stack_sreg
1329 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* 0x1f: POP DS - invalid in 64-bit mode.  The mask lists rSP plus the
       full DS state (selector, base, limit, attributes) the C implementation
       may modify. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS),
                                iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1343
1344
1345/**
1346 * @opcode 0x20
1347 * @opgroup og_gen_arith_bin
1348 * @opflclass logical
1349 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* 0x20: AND r/m8, r8 - AF undefined (logical op); LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_and_u8, iemAImpl_and_u8_locked, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
1356
1357
1358/**
1359 * @opcode 0x21
1360 * @opgroup og_gen_arith_bin
1361 * @opflclass logical
1362 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* 0x21: AND r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1370
1371
1372/**
1373 * @opcode 0x22
1374 * @opgroup og_gen_arith_bin
1375 * @opflclass logical
1376 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* 0x22: AND r8, r/m8. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1383
1384
1385/**
1386 * @opcode 0x23
1387 * @opgroup og_gen_arith_bin
1388 * @opflclass logical
1389 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* 0x23: AND r16/32/64, r/m16/32/64. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1, 0, and, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1397
1398
1399/**
1400 * @opcode 0x24
1401 * @opgroup og_gen_arith_bin
1402 * @opflclass logical
1403 */
1404FNIEMOP_DEF(iemOp_and_Al_Ib)
1405{
1406 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1407 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1408 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1409}
1410
1411
1412/**
1413 * @opcode 0x25
1414 * @opgroup og_gen_arith_bin
1415 * @opflclass logical
1416 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* 0x25: AND rAX, Iz - last arg 1: destination register is modified. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1423
1424
1425/**
1426 * @opcode 0x26
1427 * @opmnemonic SEG
1428 * @op1 ES
1429 * @opgroup og_prefix
1430 * @openc prefix
1431 * @opdisenum OP_SEG
1432 * @ophints harmless
1433 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* 0x26: ES segment-override prefix - record it, then decode the next
       opcode byte through the one-byte map. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg    = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1443
1444
1445/**
1446 * @opcode 0x27
1447 * @opfltest af,cf
1448 * @opflmodify cf,pf,af,zf,sf,of
1449 * @opflundef of
1450 */
FNIEMOP_DEF(iemOp_daa)
{
    /* 0x27: DAA - decimal adjust AL after addition; invalid in 64-bit mode.
       Defers to iemCImpl_daa, which may modify rAX and the status flags;
       OF is declared undefined for verification. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_daa);
}
1459
1460
1461/**
1462 * @opcode 0x28
1463 * @opgroup og_gen_arith_bin
1464 * @opflclass arithmetic
1465 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* 0x28: SUB r/m8, r8 - LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_sub_u8, iemAImpl_sub_u8_locked, sub, 0, 0);
}
1471
1472
1473/**
1474 * @opcode 0x29
1475 * @opgroup og_gen_arith_bin
1476 * @opflclass arithmetic
1477 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* 0x29: SUB r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64, sub, 0, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1484
1485
1486/**
1487 * @opcode 0x2a
1488 * @opgroup og_gen_arith_bin
1489 * @opflclass arithmetic
1490 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* 0x2a: SUB r8, r/m8. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8, sub, 0);
}
1496
1497
1498/**
1499 * @opcode 0x2b
1500 * @opgroup og_gen_arith_bin
1501 * @opflclass arithmetic
1502 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* 0x2b: SUB r16/32/64, r/m16/32/64. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1, 0, sub, 0);
}
1509
1510
1511/**
1512 * @opcode 0x2c
1513 * @opgroup og_gen_arith_bin
1514 * @opflclass arithmetic
1515 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* 0x2c: SUB AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1521
1522
1523/**
1524 * @opcode 0x2d
1525 * @opgroup og_gen_arith_bin
1526 * @opflclass arithmetic
1527 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* 0x2d: SUB rAX, Iz - last arg 1: destination register is modified. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1533
1534
1535/**
1536 * @opcode 0x2e
1537 * @opmnemonic SEG
1538 * @op1 CS
1539 * @opgroup og_prefix
1540 * @openc prefix
1541 * @opdisenum OP_SEG
1542 * @ophints harmless
1543 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* 0x2e: CS segment-override prefix - record it, then decode the next
       opcode byte through the one-byte map. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1553
1554
1555/**
1556 * @opcode 0x2f
1557 * @opfltest af,cf
1558 * @opflmodify cf,pf,af,zf,sf,of
1559 * @opflundef of
1560 */
FNIEMOP_DEF(iemOp_das)
{
    /* 0x2f: DAS - decimal adjust AL after subtraction; invalid in 64-bit
       mode.  Defers to iemCImpl_das, which may modify rAX and the status
       flags; OF is declared undefined for verification. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_das);
}
1569
1570
1571/**
1572 * @opcode 0x30
1573 * @opgroup og_gen_arith_bin
1574 * @opflclass logical
1575 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* 0x30: XOR r/m8, r8 - AF undefined (logical op); LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_xor_u8, iemAImpl_xor_u8_locked, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
1582
1583
1584/**
1585 * @opcode 0x31
1586 * @opgroup og_gen_arith_bin
1587 * @opflclass logical
1588 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* 0x31: XOR r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1596
1597
1598/**
1599 * @opcode 0x32
1600 * @opgroup og_gen_arith_bin
1601 * @opflclass logical
1602 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* 0x32: XOR r8, r/m8. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /** @todo xor al,al optimization */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1610
1611
1612/**
1613 * @opcode 0x33
1614 * @opgroup og_gen_arith_bin
1615 * @opflclass logical
1616 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Deal with special case of 'xor rN, rN' which sets rN to zero and has a known EFLAGS outcome.
     */
    /* The first comparison holds exactly when mod == 3 (register form) and
       reg == rm; the REX check ensures both operand fields name the same
       register once the REX.R/REX.B extension bits are applied. */
    if (   (bRm >> X86_MODRM_REG_SHIFT) == ((bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT))
        && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB)
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
                IEM_MC_LOCAL(uint32_t, fEFlags);
                IEM_MC_FETCH_EFLAGS(fEFlags);
                /* Clear all status flags, then set PF and ZF (zero result). */
                IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS);
                IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF);
                IEM_MC_COMMIT_EFLAGS(fEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
                IEM_MC_LOCAL(uint32_t, fEFlags);
                IEM_MC_FETCH_EFLAGS(fEFlags);
                IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS);
                IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF);
                IEM_MC_COMMIT_EFLAGS(fEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
                IEM_MC_LOCAL(uint32_t, fEFlags);
                IEM_MC_FETCH_EFLAGS(fEFlags);
                IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS);
                IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF);
                IEM_MC_COMMIT_EFLAGS(fEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /* Generic path (not reached for the same-register case above, whose
       MC blocks finish the instruction). */
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1, 0, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1677
1678
1679/**
1680 * @opcode 0x34
1681 * @opgroup og_gen_arith_bin
1682 * @opflclass logical
1683 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* 0x34: XOR AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1690
1691
1692/**
1693 * @opcode 0x35
1694 * @opgroup og_gen_arith_bin
1695 * @opflclass logical
1696 */
1697FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1698{
1699 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1700 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1701 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1702}
1703
1704
1705/**
1706 * @opcode 0x36
1707 * @opmnemonic SEG
1708 * @op1 SS
1709 * @opgroup og_prefix
1710 * @openc prefix
1711 * @opdisenum OP_SEG
1712 * @ophints harmless
1713 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* 0x36: SS segment-override prefix - record it, then decode the next
       opcode byte through the one-byte map. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1723
1724
1725/**
1726 * @opcode 0x37
1727 * @opfltest af
1728 * @opflmodify cf,pf,af,zf,sf,of
1729 * @opflundef pf,zf,sf,of
1730 * @opgroup og_gen_arith_dec
1731 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1732 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1733 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1734 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1735 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1736 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1737 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1738 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1739 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1740 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1741 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1742 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1743 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1744 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1745 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1746 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1747 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1748 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1749 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1750 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1751 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1752 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1753 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1754 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1755 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1756 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1757 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1758 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1759 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1760 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1761 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1762 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* 0x37: AAA - ASCII adjust AL after addition; invalid in 64-bit mode.
       Defers to iemCImpl_aaa, which may modify rAX and the status flags;
       OF is declared undefined for verification. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aaa);
}
1772
1773
1774/**
1775 * @opcode 0x38
1776 * @opflclass arithmetic
1777 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* 0x38: CMP r/m8, r8 - read-only body (RO): only EFLAGS are written, no LOCK variant. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_cmp_u8, cmp, 0);
}
1783
1784
1785/**
1786 * @opcode 0x39
1787 * @opflclass arithmetic
1788 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP r/m16/32/64, r16/32/64 - compare only; read-only (RO) body since
       the destination operand is never written back. */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, cmp, 0);
}
1794
1795
1796/**
1797 * @opcode 0x3a
1798 * @opflclass arithmetic
1799 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP r8, r/m8 - register-destination direction of the byte compare. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8, cmp, 0);
}
1805
1806
1807/**
1808 * @opcode 0x3b
1809 * @opflclass arithmetic
1810 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP r16/32/64, r/m16/32/64 - register-destination direction. */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* fetch ModRM before expanding the body */
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0, 0, cmp, 0);
}
1817
1818
1819/**
1820 * @opcode 0x3c
1821 * @opflclass arithmetic
1822 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL, imm8 - short-form compare against the accumulator. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1828
1829
1830/**
1831 * @opcode 0x3d
1832 * @opflclass arithmetic
1833 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX, imm16/32 - short-form compare against the accumulator
       (imm32 is sign-extended for the 64-bit operand size). */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1839
1840
1841/**
1842 * @opcode 0x3e
1843 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record it and continue decoding with the
       next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds"); /* a prefix after REX cancels the REX bits */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* dispatch the real opcode */
}
1853
1854
1855/**
1856 * @opcode 0x3f
1857 * @opfltest af
1858 * @opflmodify cf,pf,af,zf,sf,of
1859 * @opflundef pf,zf,sf,of
1860 * @opgroup og_gen_arith_dec
1861 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1862 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1863 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1864 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1865 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1866 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1867 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1868 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1869 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1870 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1871 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1872 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1873 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1874 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1875 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1876 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1877 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1878 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1879 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1880 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1881 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1882 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1883 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1884 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1885 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1886 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1887 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1888 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1889 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1890 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1891 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1892 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1893 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1894 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1895 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1896 */
1897FNIEMOP_DEF(iemOp_aas)
1898{
1899 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1900 IEMOP_HLP_NO_64BIT();
1901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1902 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1903
1904 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aas);
1905}
1906
1907
1908/**
1909 * Common 'inc/dec register' helper.
1910 *
1911 * Not for 64-bit code, only for what became the rex prefixes.
1912 */
1913#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
1914 switch (pVCpu->iem.s.enmEffOpSize) \
1915 { \
1916 case IEMMODE_16BIT: \
1917 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_64BIT, 0); \
1918 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1919 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
1920 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1921 IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
1922 IEM_MC_REF_EFLAGS(pEFlags); \
1923 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
1924 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1925 IEM_MC_END(); \
1926 break; \
1927 \
1928 case IEMMODE_32BIT: \
1929 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0); \
1930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1931 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
1932 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1933 IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
1934 IEM_MC_REF_EFLAGS(pEFlags); \
1935 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
1936 IEM_MC_CLEAR_HIGH_GREG_U64(a_iReg); \
1937 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1938 IEM_MC_END(); \
1939 break; \
1940 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1941 } \
1942 (void)0
1943
1944/**
1945 * @opcode 0x40
1946 * @opflclass incdec
1947 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex"); /* plain REX: no R/X/B/W bits */
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(b_apfnOneByteMap[b]); /* restart decoding with the next byte */
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1965
1966
1967/**
1968 * @opcode 0x41
1969 * @opflclass incdec
1970 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3; /* REX.B: bit 3 of ModRM.rm/base/opcode-reg */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* restart decoding with the next byte */
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1989
1990
1991/**
1992 * @opcode 0x42
1993 * @opflclass incdec
1994 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X: bit 3 of the SIB index register */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* restart decoding with the next byte */
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
2013
2014
2015
2016/**
2017 * @opcode 0x43
2018 * @opflclass incdec
2019 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx"); /* REX.B and REX.X set */
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* restart decoding with the next byte */
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
2039
2040
2041/**
2042 * @opcode 0x44
2043 * @opflclass incdec
2044 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R: bit 3 of the ModRM.reg register */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* restart decoding with the next byte */
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
2063
2064
2065/**
2066 * @opcode 0x45
2067 * @opflclass incdec
2068 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb"); /* REX.R and REX.B set */
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* restart decoding with the next byte */
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
2088
2089
2090/**
2091 * @opcode 0x46
2092 * @opflclass incdec
2093 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx"); /* REX.R and REX.X set */
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* restart decoding with the next byte */
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
2113
2114
2115/**
2116 * @opcode 0x47
2117 * @opflclass incdec
2118 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx"); /* REX.R, REX.B and REX.X set */
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* restart decoding with the next byte */
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
2139
2140
2141/**
2142 * @opcode 0x48
2143 * @opflclass incdec
2144 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* restart decoding with the next byte */
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
}
2163
2164
2165/**
2166 * @opcode 0x49
2167 * @opflclass incdec
2168 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw"); /* REX.B and REX.W set */
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
}
2188
2189
2190/**
2191 * @opcode 0x4a
2192 * @opflclass incdec
2193 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw"); /* REX.X and REX.W set */
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
}
2213
2214
2215/**
2216 * @opcode 0x4b
2217 * @opflclass incdec
2218 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw"); /* REX.B, REX.X and REX.W set */
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
}
2239
2240
2241/**
2242 * @opcode 0x4c
2243 * @opflclass incdec
2244 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw"); /* REX.R and REX.W set */
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
}
2264
2265
2266/**
2267 * @opcode 0x4d
2268 * @opflclass incdec
2269 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw"); /* REX.R, REX.B and REX.W set */
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
}
2290
2291
2292/**
2293 * @opcode 0x4e
2294 * @opflclass incdec
2295 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw"); /* REX.R, REX.X and REX.W set */
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
}
2316
2317
2318/**
2319 * @opcode 0x4f
2320 * @opflclass incdec
2321 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw"); /* all four REX bits set */
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
}
2343
2344
2345/**
2346 * Common 'push register' helper.
2347 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* In 64-bit mode REX.B extends the register and the default operand
           size is 64 bits; the 0x66 prefix selects 16 bits (no 32-bit push). */
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT: /* not reachable in 64-bit mode, see above */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2392
2393
2394/**
2395 * @opcode 0x50
2396 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    /* PUSH rAX - shared register-push body. */
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
2402
2403
2404/**
2405 * @opcode 0x51
2406 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    /* PUSH rCX - shared register-push body. */
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
2412
2413
2414/**
2415 * @opcode 0x52
2416 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    /* PUSH rDX - shared register-push body. */
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
2422
2423
2424/**
2425 * @opcode 0x53
2426 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    /* PUSH rBX - shared register-push body. */
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
2432
2433
2434/**
2435 * @opcode 0x54
2436 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) != IEMTARGETCPU_8086)
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);

    /* 8086 works differently wrt to 'push sp' compared to 80186 and later. */
    /* The 8086 pushes the value of SP *after* the decrement, hence the
       explicit subtract-by-2 before the push below. */
    IEM_MC_BEGIN(0, 1, IEM_MC_F_ONLY_8086, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Value);
    IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
    IEM_MC_SUB_LOCAL_U16(u16Value, 2);
    IEM_MC_PUSH_U16(u16Value);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
2453
2454
2455/**
2456 * @opcode 0x55
2457 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    /* PUSH rBP - shared register-push body. */
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
2463
2464
2465/**
2466 * @opcode 0x56
2467 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    /* PUSH rSI - shared register-push body. */
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
2473
2474
2475/**
2476 * @opcode 0x57
2477 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    /* PUSH rDI - shared register-push body. */
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
2483
2484
2485/**
2486 * Common 'pop register' helper.
2487 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* In 64-bit mode REX.B extends the register and the default operand
           size is 64 bits; the 0x66 prefix selects 16 bits (no 32-bit pop). */
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U16(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT: /* not reachable in 64-bit mode, see above */
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U32(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U64(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2526
2527
2528/**
2529 * @opcode 0x58
2530 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    /* POP rAX - shared register-pop body. */
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
2536
2537
2538/**
2539 * @opcode 0x59
2540 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    /* POP rCX - shared register-pop body. */
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
2546
2547
2548/**
2549 * @opcode 0x5a
2550 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    /* POP rDX - shared register-pop body. */
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
2556
2557
2558/**
2559 * @opcode 0x5b
2560 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    /* POP rBX - shared register-pop body. */
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
2566
2567
2568/**
2569 * @opcode 0x5c
2570 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    /* POP rSP - shared register-pop body (the pop helper handles the
       SP-destination case). */
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
}
2576
2577
2578/**
2579 * @opcode 0x5d
2580 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    /* POP rBP - shared register-pop body. */
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
2586
2587
2588/**
2589 * @opcode 0x5e
2590 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    /* POP rSI - shared register-pop body. */
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
2596
2597
2598/**
2599 * @opcode 0x5f
2600 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    /* POP rDI - shared register-pop body. */
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
2606
2607
2608/**
2609 * @opcode 0x60
2610 */
FNIEMOP_DEF(iemOp_pusha)
{
    /* PUSHA/PUSHAD - push all eight GPRs; 80186+, invalid in 64-bit mode. */
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT); /* only 16/32-bit possible outside 64-bit mode */
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_32);
}
2621
2622
2623/**
2624 * @opcode 0x61
2625 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    /* 0x61 is POPA/POPAD outside 64-bit mode; in 64-bit mode the byte was
       reassigned as the (unsupported) MVEX prefix. */
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        /* The modified-register mask lists all eight GPRs since popa loads
           everything (the C implementation skips the SP slot value, but SP
           itself moves). */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                          RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                        iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                      RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                    iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
2660
2661
2662/**
2663 * @opcode 0x62
2664 * @opmnemonic bound
2665 * @op1 Gv_RO
2666 * @op2 Ma
2667 * @opmincpu 80186
2668 * @ophints harmless x86_invalid_64
2669 * @optest op1=0 op2=0 ->
2670 * @optest op1=1 op2=0 -> value.xcpt=5
2671 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2672 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2673 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2674 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2675 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2676 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2677 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2678 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2679 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2680 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2681 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2682 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2683 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2684 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2685 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2686 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2687 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2688 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2689 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2690 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2691 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2692 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2693 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2694 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2695 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2696 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2697 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2698 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2699 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2700 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2701 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2702 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2703 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2704 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2705 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2706 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2707 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2708 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2709 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2710 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2711 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2712 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2713 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set.  In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X.  Thus, just
       like with the 3-byte VEX, 32-bit code is restrict wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /** @todo testcase: check that there are two memory accesses involved. Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint16_t, u16Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at the effective address, upper bound 2 bytes after. */
                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint32_t, u32Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at the effective address, upper bound 4 bytes after. */
                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX path: consume the remaining two payload bytes, then give up. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2801
2802
2803/**
2804 * @opcode 0x63
2805 * @opflmodify zf
2806 * @note non-64-bit modes.
2807 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    /* ARPL - adjust RPL of the selector in Ew from Gw; 80286+, protected mode
       only (see IEMOP_HLP_NO_REAL_OR_V86_MODE below), modifies ZF. */
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register */
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
        IEM_MC_ARG(uint16_t,        u16Src,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory - read-modify-write on the destination selector word. */
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEM_MC_ARG(uint16_t *,                pu16Dst,          0);
        IEM_MC_ARG(uint16_t,                  u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(              pEFlags, EFlags,  2);
        IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,                 bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2855
2856
2857/**
2858 * @opcode 0x63
2859 *
2860 * @note This is a weird one. It works like a regular move instruction if
2861 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2862 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
 /* 64-bit mode only; the non-64-bit decode of 0x63 is ARPL (above). */
 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

 /* Only the REX.W form (sign-extend 32->64) is implemented here; the
 no-REX.W "plain move" variant noted above asserts as unimplemented. */
 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
 {
 if (IEM_IS_MODRM_REG_MODE(bRm))
 {
 /*
 * Register to register.
 */
 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 IEM_MC_LOCAL(uint64_t, u64Value);
 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
 IEM_MC_ADVANCE_RIP_AND_FINISH();
 IEM_MC_END();
 }
 else
 {
 /*
 * We're loading a register from memory.
 */
 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
 IEM_MC_LOCAL(uint64_t, u64Value);
 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
 IEM_MC_ADVANCE_RIP_AND_FINISH();
 IEM_MC_END();
 }
 }
 else
 AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
2904
2905
2906/**
2907 * @opcode 0x64
2908 * @opmnemonic segfs
2909 * @opmincpu 80386
2910 * @opgroup og_prefixes
 *
 * FS segment-override prefix: records the prefix flag, makes FS the
 * effective segment, then fetches and dispatches the next opcode byte.
2911 */
2912FNIEMOP_DEF(iemOp_seg_FS)
2913{
2914 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
2915 IEMOP_HLP_MIN_386();
2916
2917 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
2918 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
2919
 /* Tail-dispatch the instruction following the prefix. */
2920 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2921 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2922}
2923
2924
2925/**
2926 * @opcode 0x65
2927 * @opmnemonic seggs
2928 * @opmincpu 80386
2929 * @opgroup og_prefixes
 *
 * GS segment-override prefix: records the prefix flag, makes GS the
 * effective segment, then fetches and dispatches the next opcode byte.
2930 */
2931FNIEMOP_DEF(iemOp_seg_GS)
2932{
2933 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
2934 IEMOP_HLP_MIN_386();
2935
2936 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
2937 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
2938
 /* Tail-dispatch the instruction following the prefix. */
2939 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2940 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2941}
2942
2943
2944/**
2945 * @opcode 0x66
2946 * @opmnemonic opsize
2947 * @openc prefix
2948 * @opmincpu 80386
2949 * @ophints harmless
2950 * @opgroup og_prefixes
 *
 * Operand-size override prefix: sets IEM_OP_PRF_SIZE_OP, recalculates the
 * effective operand size, then dispatches the following opcode byte.
2951 */
2952FNIEMOP_DEF(iemOp_op_size)
2953{
2954 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
2955 IEMOP_HLP_MIN_386();
2956
2957 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
2958 iemRecalEffOpSize(pVCpu);
2959
2960 /* For the 4 entry opcode tables, the operand prefix doesn't not count
2961 when REPZ or REPNZ are present. */
 /* idxPrefix selects the column of the 4-entry (none/66/F3/F2) opcode
 tables; only claim slot 1 (66h) if no REP prefix was seen first. */
2962 if (pVCpu->iem.s.idxPrefix == 0)
2963 pVCpu->iem.s.idxPrefix = 1;
2964
2965 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2966 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2967}
2968
2969
2970/**
2971 * @opcode 0x67
2972 * @opmnemonic addrsize
2973 * @openc prefix
2974 * @opmincpu 80386
2975 * @ophints harmless
2976 * @opgroup og_prefixes
 *
 * Address-size override prefix: toggles the effective address mode away
 * from the default (16<->32 in legacy modes, 64->32 in long mode), then
 * dispatches the following opcode byte.
2977 */
2978FNIEMOP_DEF(iemOp_addr_size)
2979{
2980 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
2981 IEMOP_HLP_MIN_386();
2982
2983 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
2984 switch (pVCpu->iem.s.enmDefAddrMode)
2985 {
2986 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2987 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
2988 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2989 default: AssertFailed();
2990 }
2991
2992 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2993 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2994}
2995
2996
2997/**
2998 * @opcode 0x68
 *
 * PUSH Iz - push a 16/32-bit immediate (sign-extended 32-bit immediate in
 * 64-bit mode).  Operand size defaults to 64-bit in long mode.
2999 */
3000FNIEMOP_DEF(iemOp_push_Iz)
3001{
3002 IEMOP_MNEMONIC(push_Iz, "push Iz");
3003 IEMOP_HLP_MIN_186();
3004 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3005 switch (pVCpu->iem.s.enmEffOpSize)
3006 {
3007 case IEMMODE_16BIT:
3008 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
3009 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3011 IEM_MC_LOCAL_CONST(uint16_t, u16Value, u16Imm);
3012 IEM_MC_PUSH_U16(u16Value);
3013 IEM_MC_ADVANCE_RIP_AND_FINISH();
3014 IEM_MC_END();
3015 break;
3016
3017 case IEMMODE_32BIT:
3018 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
3019 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3021 IEM_MC_LOCAL_CONST(uint32_t, u32Value, u32Imm);
3022 IEM_MC_PUSH_U32(u32Value);
3023 IEM_MC_ADVANCE_RIP_AND_FINISH();
3024 IEM_MC_END();
3025 break;
3026
3027 case IEMMODE_64BIT:
 /* The immediate is still 32 bits wide here and is sign-extended
 to 64 bits before being pushed. */
3028 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
3029 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3031 IEM_MC_LOCAL_CONST(uint64_t, u64Value, u64Imm);
3032 IEM_MC_PUSH_U64(u64Value);
3033 IEM_MC_ADVANCE_RIP_AND_FINISH();
3034 IEM_MC_END();
3035 break;
3036
3037 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3038 }
3039}
3040
3041
3042/**
3043 * @opcode 0x69
3044 * @opflclass multiply
 *
 * IMUL Gv,Ev,Iz - three operand signed multiply with full-width immediate.
 * The EFLAGS helper is picked per guest CPU behavior profile via
 * IEMTARGETCPU_EFL_BEHAVIOR_SELECT; SF/ZF/AF/PF are undefined outputs.
3045 */
3046FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
3047{
3048 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
3049 IEMOP_HLP_MIN_186();
3050 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3051 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3052
3053 switch (pVCpu->iem.s.enmEffOpSize)
3054 {
3055 case IEMMODE_16BIT:
3056 {
3057 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
3058 if (IEM_IS_MODRM_REG_MODE(bRm))
3059 {
3060 /* register operand */
3061 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3062 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
3063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3064 IEM_MC_LOCAL(uint16_t, u16Tmp);
3065 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3066 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3067 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
3068 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3069 IEM_MC_REF_EFLAGS(pEFlags);
3070 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3071 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3072
3073 IEM_MC_ADVANCE_RIP_AND_FINISH();
3074 IEM_MC_END();
3075 }
3076 else
3077 {
3078 /* memory operand */
 /* Note: the effective address must be calculated before the
 immediate is fetched (cbImm = 2 passed to the calc). */
3079 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
3080 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3081 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
3082
3083 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3085
3086 IEM_MC_LOCAL(uint16_t, u16Tmp);
3087 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3088
3089 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3090 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
3091 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3092 IEM_MC_REF_EFLAGS(pEFlags);
3093 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3094 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3095
3096 IEM_MC_ADVANCE_RIP_AND_FINISH();
3097 IEM_MC_END();
3098 }
3099 break;
3100 }
3101
3102 case IEMMODE_32BIT:
3103 {
3104 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
3105 if (IEM_IS_MODRM_REG_MODE(bRm))
3106 {
3107 /* register operand */
3108 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3109 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
3110 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3111 IEM_MC_LOCAL(uint32_t, u32Tmp);
3112 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3113
3114 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3115 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
3116 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3117 IEM_MC_REF_EFLAGS(pEFlags);
3118 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3119 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3120
3121 IEM_MC_ADVANCE_RIP_AND_FINISH();
3122 IEM_MC_END();
3123 }
3124 else
3125 {
3126 /* memory operand */
3127 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
3128 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3129 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3130
3131 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3133
3134 IEM_MC_LOCAL(uint32_t, u32Tmp);
3135 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3136
3137 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3138 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
3139 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3140 IEM_MC_REF_EFLAGS(pEFlags);
3141 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3142 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3143
3144 IEM_MC_ADVANCE_RIP_AND_FINISH();
3145 IEM_MC_END();
3146 }
3147 break;
3148 }
3149
3150 case IEMMODE_64BIT:
3151 {
3152 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3153 if (IEM_IS_MODRM_REG_MODE(bRm))
3154 {
3155 /* register operand */
 /* Iz is a 32-bit immediate sign-extended to 64 bits. */
3156 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3157 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
3158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3159 IEM_MC_LOCAL(uint64_t, u64Tmp);
3160 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3161
3162 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3163 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
3164 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3165 IEM_MC_REF_EFLAGS(pEFlags);
3166 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3167 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3168
3169 IEM_MC_ADVANCE_RIP_AND_FINISH();
3170 IEM_MC_END();
3171 }
3172 else
3173 {
3174 /* memory operand */
3175 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3176 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3177 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3178
3179 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
3180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* parameter count for the threaded function for this block. */
3181
3182 IEM_MC_LOCAL(uint64_t, u64Tmp);
3183 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3184
3185 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
 /* Sign-extension of the 32-bit immediate is done here instead. */
3186 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int32_t)u32Imm, 1);
3187 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3188 IEM_MC_REF_EFLAGS(pEFlags);
3189 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3190 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3191
3192 IEM_MC_ADVANCE_RIP_AND_FINISH();
3193 IEM_MC_END();
3194 }
3195 break;
3196 }
3197
3198 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3199 }
3200}
3201
3202
3203/**
3204 * @opcode 0x6a
 *
 * PUSH Ib - push a sign-extended byte immediate; operand size defaults to
 * 64-bit in long mode.
3205 */
3206FNIEMOP_DEF(iemOp_push_Ib)
3207{
3208 IEMOP_MNEMONIC(push_Ib, "push Ib");
3209 IEMOP_HLP_MIN_186();
3210 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3211 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3212
 /* The signed byte is widened to the effective operand size below. */
3213 switch (pVCpu->iem.s.enmEffOpSize)
3214 {
3215 case IEMMODE_16BIT:
3216 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_186, 0);
3217 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3218 IEM_MC_LOCAL_CONST(uint16_t, uValue, (int16_t)i8Imm);
3219 IEM_MC_PUSH_U16(uValue);
3220 IEM_MC_ADVANCE_RIP_AND_FINISH();
3221 IEM_MC_END();
3222 break;
3223 case IEMMODE_32BIT:
3224 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
3225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3226 IEM_MC_LOCAL_CONST(uint32_t, uValue, (int32_t)i8Imm);
3227 IEM_MC_PUSH_U32(uValue);
3228 IEM_MC_ADVANCE_RIP_AND_FINISH();
3229 IEM_MC_END();
3230 break;
3231 case IEMMODE_64BIT:
3232 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
3233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3234 IEM_MC_LOCAL_CONST(uint64_t, uValue, (int64_t)i8Imm);
3235 IEM_MC_PUSH_U64(uValue);
3236 IEM_MC_ADVANCE_RIP_AND_FINISH();
3237 IEM_MC_END();
3238 break;
3239 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3240 }
3241}
3242
3243
3244/**
3245 * @opcode 0x6b
3246 * @opflclass multiply
 *
 * IMUL Gv,Ev,Ib - three operand signed multiply with a sign-extended byte
 * immediate.  Structure mirrors iemOp_imul_Gv_Ev_Iz (opcode 0x69); only
 * the immediate fetch/widening differs.
3247 */
3248FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
3249{
3250 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
3251 IEMOP_HLP_MIN_186();
3252 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3253 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3254
3255 switch (pVCpu->iem.s.enmEffOpSize)
3256 {
3257 case IEMMODE_16BIT:
3258 {
3259 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
3260 if (IEM_IS_MODRM_REG_MODE(bRm))
3261 {
3262 /* register operand */
3263 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
3264 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3265 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3266
3267 IEM_MC_LOCAL(uint16_t, u16Tmp);
3268 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3269
3270 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3271 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
3272 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3273 IEM_MC_REF_EFLAGS(pEFlags);
3274 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3275 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3276
3277 IEM_MC_ADVANCE_RIP_AND_FINISH();
3278 IEM_MC_END();
3279 }
3280 else
3281 {
3282 /* memory operand */
 /* Effective address first (cbImm = 1), then the immediate. */
3283 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
3284
3285 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3286 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3287
3288 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
3289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3290
3291 IEM_MC_LOCAL(uint16_t, u16Tmp);
3292 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3293
3294 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3295 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
3296 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3297 IEM_MC_REF_EFLAGS(pEFlags);
3298 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3299 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3300
3301 IEM_MC_ADVANCE_RIP_AND_FINISH();
3302 IEM_MC_END();
3303 }
3304 break;
3305 }
3306
3307 case IEMMODE_32BIT:
3308 {
3309 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
3310 if (IEM_IS_MODRM_REG_MODE(bRm))
3311 {
3312 /* register operand */
3313 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3314 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
3315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3316 IEM_MC_LOCAL(uint32_t, u32Tmp);
3317 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3318
3319 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3320 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
3321 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3322 IEM_MC_REF_EFLAGS(pEFlags);
3323 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3324 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3325
3326 IEM_MC_ADVANCE_RIP_AND_FINISH();
3327 IEM_MC_END();
3328 }
3329 else
3330 {
3331 /* memory operand */
3332 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
3333 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3334 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3335
3336 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
3337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3338
3339 IEM_MC_LOCAL(uint32_t, u32Tmp);
3340 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3341
3342 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3343 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
3344 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3345 IEM_MC_REF_EFLAGS(pEFlags);
3346 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3347 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3348
3349 IEM_MC_ADVANCE_RIP_AND_FINISH();
3350 IEM_MC_END();
3351 }
3352 break;
3353 }
3354
3355 case IEMMODE_64BIT:
3356 {
3357 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3358 if (IEM_IS_MODRM_REG_MODE(bRm))
3359 {
3360 /* register operand */
3361 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3362 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
3363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3364 IEM_MC_LOCAL(uint64_t, u64Tmp);
3365 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3366
3367 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3368 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
3369 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3370 IEM_MC_REF_EFLAGS(pEFlags);
3371 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3372 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3373
3374 IEM_MC_ADVANCE_RIP_AND_FINISH();
3375 IEM_MC_END();
3376 }
3377 else
3378 {
3379 /* memory operand */
3380 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3381 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3382 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3383
3384 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the threaded parameter count. */
3385 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3386
3387 IEM_MC_LOCAL(uint64_t, u64Tmp);
3388 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3389
3390 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
 /* Sign-extension of the byte immediate is done here instead. */
3391 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
3392 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3393 IEM_MC_REF_EFLAGS(pEFlags);
3394 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3395 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3396
3397 IEM_MC_ADVANCE_RIP_AND_FINISH();
3398 IEM_MC_END();
3399 }
3400 break;
3401 }
3402
3403 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3404 }
3405}
3406
3407
3408/**
3409 * @opcode 0x6c
3410 * @opfltest iopl,df
 *
 * INS/REP INS Yb,DX - byte string input from port DX.  Always deferred to
 * a C implementation helper; the variant is picked by REP prefix and
 * effective address size.  The register bit-masks passed to the defer
 * macro name the guest GPRs the helper updates (xDI, plus xCX for the
 * REP forms) -- presumably for the native recompiler; confirm against the
 * IEM_MC_DEFER_TO_CIMPL_1_RET definition.
3411 */
3412FNIEMOP_DEF(iemOp_insb_Yb_DX)
3413{
3414 IEMOP_HLP_MIN_186();
3415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3416 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3417 {
3418 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
3419 switch (pVCpu->iem.s.enmEffAddrMode)
3420 {
3421 case IEMMODE_16BIT:
3422 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3423 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3424 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3425 iemCImpl_rep_ins_op8_addr16, false);
3426 case IEMMODE_32BIT:
3427 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3428 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3429 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3430 iemCImpl_rep_ins_op8_addr32, false);
3431 case IEMMODE_64BIT:
3432 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3433 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3434 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3435 iemCImpl_rep_ins_op8_addr64, false);
3436 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3437 }
3438 }
3439 else
3440 {
3441 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
3442 switch (pVCpu->iem.s.enmEffAddrMode)
3443 {
3444 case IEMMODE_16BIT:
3445 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3446 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3447 iemCImpl_ins_op8_addr16, false);
3448 case IEMMODE_32BIT:
3449 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3450 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3451 iemCImpl_ins_op8_addr32, false);
3452 case IEMMODE_64BIT:
3453 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3454 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3455 iemCImpl_ins_op8_addr64, false);
3456 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3457 }
3458 }
3459}
3460
3461
3462/**
3463 * @opcode 0x6d
3464 * @opfltest iopl,df
 *
 * INS/REP INS Yv,DX - word/dword string input from port DX.  Deferred to a
 * C implementation helper selected by REP prefix, effective operand size
 * and effective address size.  Note that the 64-bit operand-size case
 * shares the 32-bit helpers (no 64-bit port I/O).
3465 */
3466FNIEMOP_DEF(iemOp_inswd_Yv_DX)
3467{
3468 IEMOP_HLP_MIN_186();
3469 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3470 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3471 {
3472 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
3473 switch (pVCpu->iem.s.enmEffOpSize)
3474 {
3475 case IEMMODE_16BIT:
3476 switch (pVCpu->iem.s.enmEffAddrMode)
3477 {
3478 case IEMMODE_16BIT:
3479 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3480 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3481 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3482 iemCImpl_rep_ins_op16_addr16, false);
3483 case IEMMODE_32BIT:
3484 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3485 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3486 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3487 iemCImpl_rep_ins_op16_addr32, false);
3488 case IEMMODE_64BIT:
3489 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3490 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3491 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3492 iemCImpl_rep_ins_op16_addr64, false);
3493 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3494 }
3495 break;
 /* 64-bit operand size falls through to the 32-bit helpers. */
3496 case IEMMODE_64BIT:
3497 case IEMMODE_32BIT:
3498 switch (pVCpu->iem.s.enmEffAddrMode)
3499 {
3500 case IEMMODE_16BIT:
3501 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3502 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3503 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3504 iemCImpl_rep_ins_op32_addr16, false);
3505 case IEMMODE_32BIT:
3506 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3507 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3508 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3509 iemCImpl_rep_ins_op32_addr32, false);
3510 case IEMMODE_64BIT:
3511 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3512 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3513 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3514 iemCImpl_rep_ins_op32_addr64, false);
3515 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3516 }
3517 break;
3518 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3519 }
3520 }
3521 else
3522 {
3523 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
3524 switch (pVCpu->iem.s.enmEffOpSize)
3525 {
3526 case IEMMODE_16BIT:
3527 switch (pVCpu->iem.s.enmEffAddrMode)
3528 {
3529 case IEMMODE_16BIT:
3530 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3531 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3532 iemCImpl_ins_op16_addr16, false);
3533 case IEMMODE_32BIT:
3534 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3535 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3536 iemCImpl_ins_op16_addr32, false);
3537 case IEMMODE_64BIT:
3538 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3539 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3540 iemCImpl_ins_op16_addr64, false);
3541 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3542 }
3543 break;
3544 case IEMMODE_64BIT:
3545 case IEMMODE_32BIT:
3546 switch (pVCpu->iem.s.enmEffAddrMode)
3547 {
3548 case IEMMODE_16BIT:
3549 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3550 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3551 iemCImpl_ins_op32_addr16, false);
3552 case IEMMODE_32BIT:
3553 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3554 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3555 iemCImpl_ins_op32_addr32, false);
3556 case IEMMODE_64BIT:
3557 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3558 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3559 iemCImpl_ins_op32_addr64, false);
3560 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3561 }
3562 break;
3563 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3564 }
3565 }
3566}
3567
3568
3569/**
3570 * @opcode 0x6e
3571 * @opfltest iopl,df
 *
 * OUTS/REP OUTS DX,Yb - byte string output to port DX.  Deferred to a C
 * implementation helper selected by REP prefix and effective address
 * size; the effective segment (overridable for the source) is passed as
 * an extra argument.  Register masks cover xSI, plus xCX for REP forms.
3572 */
3573FNIEMOP_DEF(iemOp_outsb_Yb_DX)
3574{
3575 IEMOP_HLP_MIN_186();
3576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3577 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3578 {
3579 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
3580 switch (pVCpu->iem.s.enmEffAddrMode)
3581 {
3582 case IEMMODE_16BIT:
3583 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3584 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3585 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3586 iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3587 case IEMMODE_32BIT:
3588 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3589 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3590 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3591 iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3592 case IEMMODE_64BIT:
3593 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3594 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3595 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3596 iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3597 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3598 }
3599 }
3600 else
3601 {
3602 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
3603 switch (pVCpu->iem.s.enmEffAddrMode)
3604 {
3605 case IEMMODE_16BIT:
3606 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3607 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3608 iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3609 case IEMMODE_32BIT:
3610 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3611 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3612 iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3613 case IEMMODE_64BIT:
3614 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3615 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3616 iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3617 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3618 }
3619 }
3620}
3621
3622
3623/**
3624 * @opcode 0x6f
3625 * @opfltest iopl,df
 *
 * OUTS/REP OUTS DX,Yv - word/dword string output to port DX.  Deferred to
 * a C implementation helper selected by REP prefix, effective operand
 * size and effective address size; the 64-bit operand-size case shares
 * the 32-bit helpers (no 64-bit port I/O).
3626 */
3627FNIEMOP_DEF(iemOp_outswd_Yv_DX)
3628{
3629 IEMOP_HLP_MIN_186();
3630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3631 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3632 {
3633 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
3634 switch (pVCpu->iem.s.enmEffOpSize)
3635 {
3636 case IEMMODE_16BIT:
3637 switch (pVCpu->iem.s.enmEffAddrMode)
3638 {
3639 case IEMMODE_16BIT:
3640 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3641 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3642 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3643 iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3644 case IEMMODE_32BIT:
3645 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3646 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3647 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3648 iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3649 case IEMMODE_64BIT:
3650 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3651 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3652 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3653 iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3654 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3655 }
3656 break;
 /* 64-bit operand size falls through to the 32-bit helpers. */
3657 case IEMMODE_64BIT:
3658 case IEMMODE_32BIT:
3659 switch (pVCpu->iem.s.enmEffAddrMode)
3660 {
3661 case IEMMODE_16BIT:
3662 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3663 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3664 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3665 iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3666 case IEMMODE_32BIT:
3667 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3668 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3669 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3670 iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3671 case IEMMODE_64BIT:
3672 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3673 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3674 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3675 iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3676 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3677 }
3678 break;
3679 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3680 }
3681 }
3682 else
3683 {
3684 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
3685 switch (pVCpu->iem.s.enmEffOpSize)
3686 {
3687 case IEMMODE_16BIT:
3688 switch (pVCpu->iem.s.enmEffAddrMode)
3689 {
3690 case IEMMODE_16BIT:
3691 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3692 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3693 iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3694 case IEMMODE_32BIT:
3695 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3696 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3697 iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3698 case IEMMODE_64BIT:
3699 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3700 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3701 iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3702 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3703 }
3704 break;
3705 case IEMMODE_64BIT:
3706 case IEMMODE_32BIT:
3707 switch (pVCpu->iem.s.enmEffAddrMode)
3708 {
3709 case IEMMODE_16BIT:
3710 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3711 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3712 iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3713 case IEMMODE_32BIT:
3714 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3715 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3716 iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3717 case IEMMODE_64BIT:
3718 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3719 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3720 iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3721 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3722 }
3723 break;
3724 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3725 }
3726 }
3727}
3728
3729
3730/**
3731 * @opcode 0x70
3732 * @opfltest of
 *
 * JO Jb - jump short if overflow (OF=1).
3733 */
3734FNIEMOP_DEF(iemOp_jo_Jb)
3735{
3736 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
3737 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3738 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3739
3740 IEM_MC_BEGIN(0, 0, 0, 0);
3741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3742 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3743 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3744 } IEM_MC_ELSE() {
3745 IEM_MC_ADVANCE_RIP_AND_FINISH();
3746 } IEM_MC_ENDIF();
3747 IEM_MC_END();
3748}
3749
3750
3751/**
3752 * @opcode 0x71
3753 * @opfltest of
 *
 * JNO Jb - jump short if not overflow (OF=0); note the inverted branch
 * bodies compared to JO.
3754 */
3755FNIEMOP_DEF(iemOp_jno_Jb)
3756{
3757 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
3758 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3759 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3760
3761 IEM_MC_BEGIN(0, 0, 0, 0);
3762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3763 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3764 IEM_MC_ADVANCE_RIP_AND_FINISH();
3765 } IEM_MC_ELSE() {
3766 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3767 } IEM_MC_ENDIF();
3768 IEM_MC_END();
3769}
3770
/**
 * @opcode 0x72
 * @opfltest cf
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    /* JC/JB/JNAE rel8: jump short if carry (CF=1). */
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3790
3791
/**
 * @opcode 0x73
 * @opfltest cf
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    /* JNC/JNB/JAE rel8: jump short if no carry (CF=0); IF tests CF=1 with swapped arms. */
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3811
3812
/**
 * @opcode 0x74
 * @opfltest zf
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    /* JE/JZ rel8: jump short if equal/zero (ZF=1). */
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3832
3833
/**
 * @opcode 0x75
 * @opfltest zf
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    /* JNE/JNZ rel8: jump short if not equal/not zero (ZF=0); IF tests ZF=1 with swapped arms. */
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3853
3854
/**
 * @opcode 0x76
 * @opfltest cf,zf
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    /* JBE/JNA rel8: jump short if below or equal (CF=1 or ZF=1). */
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3874
3875
/**
 * @opcode 0x77
 * @opfltest cf,zf
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    /* JA/JNBE rel8: jump short if above (CF=0 and ZF=0); IF tests the inverse with swapped arms. */
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3895
3896
/**
 * @opcode 0x78
 * @opfltest sf
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    /* JS rel8: jump short if sign (SF=1). */
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3916
3917
/**
 * @opcode 0x79
 * @opfltest sf
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    /* JNS rel8: jump short if not sign (SF=0); IF tests SF=1 with swapped arms. */
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3937
3938
/**
 * @opcode 0x7a
 * @opfltest pf
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    /* JP/JPE rel8: jump short if parity even (PF=1). */
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3958
3959
/**
 * @opcode 0x7b
 * @opfltest pf
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    /* JNP/JPO rel8: jump short if parity odd (PF=0); IF tests PF=1 with swapped arms. */
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3979
3980
/**
 * @opcode 0x7c
 * @opfltest sf,of
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    /* JL/JNGE rel8: jump short if less (signed), i.e. SF != OF. */
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4000
4001
/**
 * @opcode 0x7d
 * @opfltest sf,of
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    /* JNL/JGE rel8: jump short if not less (signed), i.e. SF == OF;
       the IF tests SF != OF and takes the fall-through arm. */
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4021
4022
/**
 * @opcode 0x7e
 * @opfltest zf,sf,of
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    /* JLE/JNG rel8: jump short if less or equal (signed), i.e. ZF=1 or SF != OF. */
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4042
4043
/**
 * @opcode 0x7f
 * @opfltest zf,sf,of
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    /* JG/JNLE rel8: jump short if greater (signed), i.e. ZF=0 and SF == OF;
       the IF tests the inverse and takes the fall-through arm. */
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4063
4064
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Eb_Ib_80.
 *
 * Handles the register-target form and the non-locked memory-target form.
 * NOTE: This macro deliberately leaves two braces open; the instruction
 * handler must follow it with IEMOP_BODY_BINARY_Eb_Ib_LOCKED (or
 * IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK) which supplies the LOCK-prefix path and
 * closes them.
 *
 * @param   a_fnNormalU8    The non-locked 8-bit worker (takes pu8Dst, u8Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            /* Effective address first; the immediate follows the ModR/M bytes (1 byte imm). */ \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
4115
/**
 * LOCK-prefixed memory-target continuation for IEMOP_BODY_BINARY_Eb_Ib_RW;
 * closes the braces that macro left open.  Uses an atomic mapping/commit so
 * the read-modify-write is performed atomically.
 *
 * @param   a_fnLockedU8    The locked 8-bit worker (takes pu8Dst, u8Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
4139
/**
 * Read-only variant of IEMOP_BODY_BINARY_Eb_Ib_RW (used by CMP, which does
 * not write the destination).  The memory operand is mapped read-only and
 * unmapped without committing.  Like the RW variant it leaves two braces
 * open for IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK to close.
 *
 * @param   a_fnNormalU8    The 8-bit worker (takes pu8Dst, u8Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
4186
/**
 * Continuation for IEMOP_BODY_BINARY_Eb_Ib_RO: a LOCK prefix is invalid for
 * instructions that do not write memory, so raise \#UD.  Closes the braces
 * left open by the RO body.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4193
4194
4195
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /0
 * @opflclass arithmetic
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
{
    /* ADD Eb,Ib.  The RW body opens the LOCK-prefix path; LOCKED closes it. */
    IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_add_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
}
4207
4208
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /1
 * @opflclass logical
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
{
    /* OR Eb,Ib. */
    IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_or_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
}
4220
4221
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /2
 * @opflclass arithmetic_carry
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
{
    /* ADC Eb,Ib (add with carry-in). */
    IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_adc_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
}
4233
4234
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /3
 * @opflclass arithmetic_carry
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
{
    /* SBB Eb,Ib (subtract with borrow-in). */
    IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_sbb_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
}
4246
4247
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /4
 * @opflclass logical
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
{
    /* AND Eb,Ib. */
    IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_and_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
}
4259
4260
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /5
 * @opflclass arithmetic
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
{
    /* SUB Eb,Ib. */
    IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
}
4272
4273
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /6
 * @opflclass logical
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
{
    /* XOR Eb,Ib. */
    IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
}
4285
4286
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /7
 * @opflclass arithmetic
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
{
    /* CMP Eb,Ib: flags only, no destination write, so LOCK is invalid (#UD). */
    IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
}
4298
4299
/**
 * @opcode 0x80
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    /* Group 1 byte-form dispatcher: ModR/M.reg selects the /0../7 worker. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4319
4320
/**
 * Body for a group 1 binary operator.
 *
 * Ev,Iz form: 16/32/64-bit destination with a word/dword immediate; in
 * 64-bit mode the immediate is 32 bits sign-extended to 64.  Covers the
 * register-target and non-locked memory-target paths; leaves two braces
 * open for IEMOP_BODY_BINARY_Ev_Iz_LOCKED to supply the LOCK path and
 * close them.
 *
 * @param   a_fnNormalU16   Non-locked 16-bit worker.
 * @param   a_fnNormalU32   Non-locked 32-bit worker.
 * @param   a_fnNormalU64   Non-locked 64-bit worker.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                /* Writing a 32-bit GPR zero-extends into bits 63:32. */ \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                /* Iz is 32 bits sign-extended to 64 in long mode. */ \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    /* 3rd arg = immediate size, for RSP-relative addressing. */ \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src, u16Imm,        1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src, u32Imm,        1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src, u64Imm,        1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* This must be a separate macro due to parsing restrictions in IEMAllInstPython.py. */
/**
 * LOCK-prefixed memory-target continuation for IEMOP_BODY_BINARY_Ev_Iz_RW;
 * closes the braces that macro left open.  Maps and commits the operand
 * atomically and dispatches to the locked workers.
 *
 * @param   a_fnLockedU16   Locked 16-bit worker.
 * @param   a_fnLockedU32   Locked 32-bit worker.
 * @param   a_fnLockedU64   Locked 64-bit worker.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src, u16Imm,        1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src, u32Imm,        1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src, u64Imm,        1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4561
/* read-only version */
/**
 * Read-only Ev,Iz body (used by CMP): the memory operand is mapped read-only
 * and no destination is written.  Unlike the RW variant this macro is
 * self-contained (it also emits the invalid-LOCK path) and closes all its
 * braces itself.
 *
 * @param   a_fnNormalU16   16-bit worker.
 * @param   a_fnNormalU32   32-bit worker.
 * @param   a_fnNormalU64   64-bit worker.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,           bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *,    pu16Dst,            0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src, u16Imm,        1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,           bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *,    pu32Dst,            0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src, u32Imm,        1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,           bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *,    pu64Dst,            0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src, u64Imm,        1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* LOCK on a read-only operation is invalid -> \#UD. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4718
4719
/**
 * @opmaps grp1_81
 * @opcode /0
 * @opflclass arithmetic
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
{
    /* ADD Ev,Iz.  The RW body opens the LOCK-prefix path; LOCKED closes it. */
    IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
4731
4732
/**
 * @opmaps grp1_81
 * @opcode /1
 * @opflclass logical
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
{
    /* OR Ev,Iz. */
    IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
4744
4745
/**
 * @opmaps grp1_81
 * @opcode /2
 * @opflclass arithmetic_carry
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
{
    /* ADC Ev,Iz (add with carry-in). */
    IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
4757
4758
/**
 * @opmaps grp1_81
 * @opcode /3
 * @opflclass arithmetic_carry
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
{
    /* SBB Ev,Iz (subtract with borrow-in). */
    IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
4770
4771
/**
 * @opmaps grp1_81
 * @opcode /4
 * @opflclass logical
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
{
    /* AND Ev,Iz. */
    IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
4783
4784
/**
 * @opmaps grp1_81
 * @opcode /5
 * @opflclass arithmetic
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
{
    /* SUB Ev,Iz. */
    IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
4796
4797
/**
 * @opmaps grp1_81
 * @opcode /6
 * @opflclass logical
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
{
    /* XOR Ev,Iz. */
    IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
4809
4810
/**
 * @opmaps grp1_81
 * @opcode /7
 * @opflclass arithmetic
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
{
    /* CMP Ev,Iz: flags only; the RO body is self-contained and rejects LOCK itself. */
    IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
4821
4822
4823/**
4824 * @opcode 0x81
4825 */
4826FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
4827{
4828 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4829 switch (IEM_GET_MODRM_REG_8(bRm))
4830 {
4831 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
4832 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz, bRm);
4833 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
4834 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
4835 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
4836 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
4837 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
4838 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
4839 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4840 }
4841}
4842
4843
4844/**
4845 * @opcode 0x82
4846 * @opmnemonic grp1_82
4847 * @opgroup og_groups
4848 */
4849FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
4850{
4851 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
4852 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
4853}
4854
4855
4856/**
4857 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4858 * iemOp_Grp1_Ev_Ib.
4859 */
4860#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4861 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4862 { \
4863 /* \
4864 * Register target \
4865 */ \
4866 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4867 switch (pVCpu->iem.s.enmEffOpSize) \
4868 { \
4869 case IEMMODE_16BIT: \
4870 IEM_MC_BEGIN(3, 0, 0, 0); \
4871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4872 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4873 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
4874 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4875 \
4876 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4877 IEM_MC_REF_EFLAGS(pEFlags); \
4878 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4879 \
4880 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4881 IEM_MC_END(); \
4882 break; \
4883 \
4884 case IEMMODE_32BIT: \
4885 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4887 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4888 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
4889 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4890 \
4891 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4892 IEM_MC_REF_EFLAGS(pEFlags); \
4893 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4894 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4895 \
4896 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4897 IEM_MC_END(); \
4898 break; \
4899 \
4900 case IEMMODE_64BIT: \
4901 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4903 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4904 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
4905 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4906 \
4907 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4908 IEM_MC_REF_EFLAGS(pEFlags); \
4909 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4910 \
4911 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4912 IEM_MC_END(); \
4913 break; \
4914 \
4915 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4916 } \
4917 } \
4918 else \
4919 { \
4920 /* \
4921 * Memory target. \
4922 */ \
4923 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4924 { \
4925 switch (pVCpu->iem.s.enmEffOpSize) \
4926 { \
4927 case IEMMODE_16BIT: \
4928 IEM_MC_BEGIN(3, 3, 0, 0); \
4929 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4930 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4931 \
4932 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4933 IEMOP_HLP_DONE_DECODING(); \
4934 \
4935 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4936 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4937 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4938 \
4939 IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
4940 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4941 IEM_MC_FETCH_EFLAGS(EFlags); \
4942 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4943 \
4944 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4945 IEM_MC_COMMIT_EFLAGS(EFlags); \
4946 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4947 IEM_MC_END(); \
4948 break; \
4949 \
4950 case IEMMODE_32BIT: \
4951 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4952 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4953 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4954 \
4955 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4956 IEMOP_HLP_DONE_DECODING(); \
4957 \
4958 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4959 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4960 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4961 \
4962 IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
4963 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4964 IEM_MC_FETCH_EFLAGS(EFlags); \
4965 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4966 \
4967 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4968 IEM_MC_COMMIT_EFLAGS(EFlags); \
4969 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4970 IEM_MC_END(); \
4971 break; \
4972 \
4973 case IEMMODE_64BIT: \
4974 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4975 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4976 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4977 \
4978 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4979 IEMOP_HLP_DONE_DECODING(); \
4980 \
4981 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4982 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4983 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4984 \
4985 IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
4986 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4987 IEM_MC_FETCH_EFLAGS(EFlags); \
4988 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4989 \
4990 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4991 IEM_MC_COMMIT_EFLAGS(EFlags); \
4992 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4993 IEM_MC_END(); \
4994 break; \
4995 \
4996 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4997 } \
4998 } \
4999 else \
5000 { \
5001 (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * Completes the 'else' branch left open by IEMOP_BODY_BINARY_Ev_Ib_RW with
 * the LOCK-prefixed memory path: the destination is mapped ATOMIC and the
 * locked worker is invoked.  Must be used immediately after the _RW macro.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
5080
/* read-only variant */
/**
 * Read-only body for group 1 w/ byte imm operand (i.e. CMP Ev,Ib): the
 * destination is only read, so memory is mapped RO and EFLAGS is the only
 * output.  A LOCK prefix on the memory form raises \#UD.  Unlike the _RW
 * macro this one is self-contained (no companion _LOCKED macro needed).
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* LOCK on a read-only (CMP-style) operation is invalid. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
5227
5228/**
5229 * @opmaps grp1_83
5230 * @opcode /0
5231 * @opflclass arithmetic
5232 */
5233FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
5234{
5235 IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
5236 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
5237 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
5238}
5239
5240
5241/**
5242 * @opmaps grp1_83
5243 * @opcode /1
5244 * @opflclass logical
5245 */
5246FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
5247{
5248 IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
5249 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
5250 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
5251}
5252
5253
5254/**
5255 * @opmaps grp1_83
5256 * @opcode /2
5257 * @opflclass arithmetic_carry
5258 */
5259FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
5260{
5261 IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
5262 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
5263 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
5264}
5265
5266
5267/**
5268 * @opmaps grp1_83
5269 * @opcode /3
5270 * @opflclass arithmetic_carry
5271 */
5272FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
5273{
5274 IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
5275 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
5276 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
5277}
5278
5279
5280/**
5281 * @opmaps grp1_83
5282 * @opcode /4
5283 * @opflclass logical
5284 */
5285FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
5286{
5287 IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
5288 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
5289 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
5290}
5291
5292
5293/**
5294 * @opmaps grp1_83
5295 * @opcode /5
5296 * @opflclass arithmetic
5297 */
5298FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
5299{
5300 IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
5301 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
5302 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
5303}
5304
5305
5306/**
5307 * @opmaps grp1_83
5308 * @opcode /6
5309 * @opflclass logical
5310 */
5311FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
5312{
5313 IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
5314 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
5315 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
5316}
5317
5318
5319/**
5320 * @opmaps grp1_83
5321 * @opcode /7
5322 * @opflclass arithmetic
5323 */
5324FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
5325{
5326 IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
5327 IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
5328}
5329
5330
5331/**
5332 * @opcode 0x83
5333 */
5334FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
5335{
5336 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
5337 to the 386 even if absent in the intel reference manuals and some
5338 3rd party opcode listings. */
5339 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5340 switch (IEM_GET_MODRM_REG_8(bRm))
5341 {
5342 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
5343 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib, bRm);
5344 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
5345 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
5346 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
5347 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
5348 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
5349 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
5350 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5351 }
5352}
5353
5354
5355/**
5356 * @opcode 0x84
5357 * @opflclass logical
5358 */
5359FNIEMOP_DEF(iemOp_test_Eb_Gb)
5360{
5361 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
5362 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5363 IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_test_u8, test, RT_ARCH_VAL_AMD64);
5364}
5365
5366
5367/**
5368 * @opcode 0x85
5369 * @opflclass logical
5370 */
5371FNIEMOP_DEF(iemOp_test_Ev_Gv)
5372{
5373 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
5374 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5375 IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, test, RT_ARCH_VAL_AMD64);
5376}
5377
5378
5379/**
5380 * @opcode 0x86
5381 */
5382FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
5383{
5384 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5385 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
5386
5387 /*
5388 * If rm is denoting a register, no more instruction bytes.
5389 */
5390 if (IEM_IS_MODRM_REG_MODE(bRm))
5391 {
5392 IEM_MC_BEGIN(0, 2, 0, 0);
5393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5394 IEM_MC_LOCAL(uint8_t, uTmp1);
5395 IEM_MC_LOCAL(uint8_t, uTmp2);
5396
5397 IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5398 IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5399 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5400 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5401
5402 IEM_MC_ADVANCE_RIP_AND_FINISH();
5403 IEM_MC_END();
5404 }
5405 else
5406 {
5407 /*
5408 * We're accessing memory.
5409 */
5410#define IEMOP_XCHG_BYTE(a_fnWorker, a_Style) \
5411 IEM_MC_BEGIN(2, 4, 0, 0); \
5412 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5413 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5414 IEM_MC_LOCAL(uint8_t, uTmpReg); \
5415 IEM_MC_ARG(uint8_t *, pu8Mem, 0); \
5416 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1); \
5417 \
5418 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5419 IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
5420 IEM_MC_MEM_MAP_U8_##a_Style(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5421 IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5422 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker, pu8Mem, pu8Reg); \
5423 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Style(bUnmapInfo); \
5424 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5425 \
5426 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5427 IEM_MC_END()
5428
5429 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5430 {
5431 IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_locked,ATOMIC);
5432 }
5433 else
5434 {
5435 IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_unlocked,RW);
5436 }
5437 }
5438}
5439
5440
5441/**
5442 * @opcode 0x87
5443 */
5444FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
5445{
5446 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
5447 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5448
5449 /*
5450 * If rm is denoting a register, no more instruction bytes.
5451 */
5452 if (IEM_IS_MODRM_REG_MODE(bRm))
5453 {
5454 switch (pVCpu->iem.s.enmEffOpSize)
5455 {
5456 case IEMMODE_16BIT:
5457 IEM_MC_BEGIN(0, 2, 0, 0);
5458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5459 IEM_MC_LOCAL(uint16_t, uTmp1);
5460 IEM_MC_LOCAL(uint16_t, uTmp2);
5461
5462 IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5463 IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5464 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5465 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5466
5467 IEM_MC_ADVANCE_RIP_AND_FINISH();
5468 IEM_MC_END();
5469 break;
5470
5471 case IEMMODE_32BIT:
5472 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5474 IEM_MC_LOCAL(uint32_t, uTmp1);
5475 IEM_MC_LOCAL(uint32_t, uTmp2);
5476
5477 IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5478 IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5479 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5480 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5481
5482 IEM_MC_ADVANCE_RIP_AND_FINISH();
5483 IEM_MC_END();
5484 break;
5485
5486 case IEMMODE_64BIT:
5487 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
5488 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5489 IEM_MC_LOCAL(uint64_t, uTmp1);
5490 IEM_MC_LOCAL(uint64_t, uTmp2);
5491
5492 IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5493 IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5494 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5495 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5496
5497 IEM_MC_ADVANCE_RIP_AND_FINISH();
5498 IEM_MC_END();
5499 break;
5500
5501 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5502 }
5503 }
5504 else
5505 {
5506 /*
5507 * We're accessing memory.
5508 */
5509#define IEMOP_XCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
5510 do { \
5511 switch (pVCpu->iem.s.enmEffOpSize) \
5512 { \
5513 case IEMMODE_16BIT: \
5514 IEM_MC_BEGIN(2, 4, 0, 0); \
5515 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5516 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5517 IEM_MC_LOCAL(uint16_t, uTmpReg); \
5518 IEM_MC_ARG(uint16_t *, pu16Mem, 0); \
5519 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1); \
5520 \
5521 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5522 IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
5523 IEM_MC_MEM_MAP_U16_##a_Type(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5524 IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5525 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker16, pu16Mem, pu16Reg); \
5526 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
5527 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5528 \
5529 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5530 IEM_MC_END(); \
5531 break; \
5532 \
5533 case IEMMODE_32BIT: \
5534 IEM_MC_BEGIN(2, 4, IEM_MC_F_MIN_386, 0); \
5535 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5536 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5537 IEM_MC_LOCAL(uint32_t, uTmpReg); \
5538 IEM_MC_ARG(uint32_t *, pu32Mem, 0); \
5539 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1); \
5540 \
5541 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5542 IEMOP_HLP_DONE_DECODING(); \
5543 IEM_MC_MEM_MAP_U32_##a_Type(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5544 IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5545 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker32, pu32Mem, pu32Reg); \
5546 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
5547 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5548 \
5549 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5550 IEM_MC_END(); \
5551 break; \
5552 \
5553 case IEMMODE_64BIT: \
5554 IEM_MC_BEGIN(2, 4, IEM_MC_F_64BIT, 0); \
5555 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5556 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5557 IEM_MC_LOCAL(uint64_t, uTmpReg); \
5558 IEM_MC_ARG(uint64_t *, pu64Mem, 0); \
5559 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1); \
5560 \
5561 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5562 IEMOP_HLP_DONE_DECODING(); \
5563 IEM_MC_MEM_MAP_U64_##a_Type(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5564 IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5565 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker64, pu64Mem, pu64Reg); \
5566 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
5567 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5568 \
5569 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5570 IEM_MC_END(); \
5571 break; \
5572 \
5573 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5574 } \
5575 } while (0)
5576 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5577 {
5578 IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_locked, iemAImpl_xchg_u32_locked, iemAImpl_xchg_u64_locked,ATOMIC);
5579 }
5580 else
5581 {
5582 IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_unlocked, iemAImpl_xchg_u32_unlocked, iemAImpl_xchg_u64_unlocked,RW);
5583 }
5584 }
5585}
5586
5587
5588/**
5589 * @opcode 0x88
5590 */
5591FNIEMOP_DEF(iemOp_mov_Eb_Gb)
5592{
5593 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
5594
5595 uint8_t bRm;
5596 IEM_OPCODE_GET_NEXT_U8(&bRm);
5597
5598 /*
5599 * If rm is denoting a register, no more instruction bytes.
5600 */
5601 if (IEM_IS_MODRM_REG_MODE(bRm))
5602 {
5603 IEM_MC_BEGIN(0, 1, 0, 0);
5604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5605 IEM_MC_LOCAL(uint8_t, u8Value);
5606 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5607 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
5608 IEM_MC_ADVANCE_RIP_AND_FINISH();
5609 IEM_MC_END();
5610 }
5611 else
5612 {
5613 /*
5614 * We're writing a register to memory.
5615 */
5616 IEM_MC_BEGIN(0, 2, 0, 0);
5617 IEM_MC_LOCAL(uint8_t, u8Value);
5618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5619 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5621 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5622 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
5623 IEM_MC_ADVANCE_RIP_AND_FINISH();
5624 IEM_MC_END();
5625 }
5626}
5627
5628
5629/**
5630 * @opcode 0x89
5631 */
5632FNIEMOP_DEF(iemOp_mov_Ev_Gv)
5633{
5634 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
5635
5636 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5637
5638 /*
5639 * If rm is denoting a register, no more instruction bytes.
5640 */
5641 if (IEM_IS_MODRM_REG_MODE(bRm))
5642 {
5643 switch (pVCpu->iem.s.enmEffOpSize)
5644 {
5645 case IEMMODE_16BIT:
5646 IEM_MC_BEGIN(0, 1, 0, 0);
5647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5648 IEM_MC_LOCAL(uint16_t, u16Value);
5649 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5650 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
5651 IEM_MC_ADVANCE_RIP_AND_FINISH();
5652 IEM_MC_END();
5653 break;
5654
5655 case IEMMODE_32BIT:
5656 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
5657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5658 IEM_MC_LOCAL(uint32_t, u32Value);
5659 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5660 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
5661 IEM_MC_ADVANCE_RIP_AND_FINISH();
5662 IEM_MC_END();
5663 break;
5664
5665 case IEMMODE_64BIT:
5666 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5668 IEM_MC_LOCAL(uint64_t, u64Value);
5669 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5670 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
5671 IEM_MC_ADVANCE_RIP_AND_FINISH();
5672 IEM_MC_END();
5673 break;
5674
5675 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5676 }
5677 }
5678 else
5679 {
5680 /*
5681 * We're writing a register to memory.
5682 */
5683 switch (pVCpu->iem.s.enmEffOpSize)
5684 {
5685 case IEMMODE_16BIT:
5686 IEM_MC_BEGIN(0, 2, 0, 0);
5687 IEM_MC_LOCAL(uint16_t, u16Value);
5688 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5689 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5691 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5692 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
5693 IEM_MC_ADVANCE_RIP_AND_FINISH();
5694 IEM_MC_END();
5695 break;
5696
5697 case IEMMODE_32BIT:
5698 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5699 IEM_MC_LOCAL(uint32_t, u32Value);
5700 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5701 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5702 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5703 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5704 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
5705 IEM_MC_ADVANCE_RIP_AND_FINISH();
5706 IEM_MC_END();
5707 break;
5708
5709 case IEMMODE_64BIT:
5710 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
5711 IEM_MC_LOCAL(uint64_t, u64Value);
5712 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5713 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5715 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5716 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
5717 IEM_MC_ADVANCE_RIP_AND_FINISH();
5718 IEM_MC_END();
5719 break;
5720
5721 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5722 }
5723 }
5724}
5725
5726
5727/**
5728 * @opcode 0x8a
5729 */
5730FNIEMOP_DEF(iemOp_mov_Gb_Eb)
5731{
5732 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
5733
5734 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5735
5736 /*
5737 * If rm is denoting a register, no more instruction bytes.
5738 */
5739 if (IEM_IS_MODRM_REG_MODE(bRm))
5740 {
5741 IEM_MC_BEGIN(0, 1, 0, 0);
5742 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5743 IEM_MC_LOCAL(uint8_t, u8Value);
5744 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5745 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
5746 IEM_MC_ADVANCE_RIP_AND_FINISH();
5747 IEM_MC_END();
5748 }
5749 else
5750 {
5751 /*
5752 * We're loading a register from memory.
5753 */
5754 IEM_MC_BEGIN(0, 2, 0, 0);
5755 IEM_MC_LOCAL(uint8_t, u8Value);
5756 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5757 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5759 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5760 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
5761 IEM_MC_ADVANCE_RIP_AND_FINISH();
5762 IEM_MC_END();
5763 }
5764}
5765
5766
/**
 * @opcode 0x8b
 *
 * mov Gv,Ev - loads the word/dword/qword register selected by modrm.reg from
 * the register or memory operand selected by modrm.rm; width is given by the
 * effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                /* 32-bit operand size requires a 386 or later. */
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5863
5864
5865/**
5866 * opcode 0x63
5867 * @todo Table fixme
5868 */
5869FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
5870{
5871 if (!IEM_IS_64BIT_CODE(pVCpu))
5872 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
5873 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
5874 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
5875 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
5876}
5877
5878
/**
 * @opcode 0x8c
 *
 * mov Ev,Sw - stores the segment selector given by modrm.reg into the
 * register or memory operand given by modrm.rm.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    if (iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                /* Zero-extend the 16-bit selector into the 32-bit register. */
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                /* Zero-extend the 16-bit selector into the 64-bit register. */
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5957
5958
5959
5960
/**
 * @opcode 0x8d
 *
 * lea Gv,M - stores the effective address of the memory operand in the
 * register given by modrm.reg, truncated/zero-extended to the effective
 * operand size.  The register form of modrm is invalid (\#UD).
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /** @todo optimize: This value casting/masking can be skipped if addr-size ==
             *        operand-size, which is usually the case. It'll save an instruction
             *        and a register. */
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /** @todo optimize: This value casting/masking can be skipped if addr-size ==
             *        operand-size, which is usually the case. It'll save an instruction
             *        and a register. */
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            /* 64-bit: the effective address already has the right width,
               no cast local needed. */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6016
6017
/**
 * @opcode 0x8e
 *
 * mov Sw,Ev - loads the segment register given by modrm.reg from the 16-bit
 * register or memory operand given by modrm.rm.  Loading CS or a non-existing
 * segment register raises \#UD.  The load itself is performed by the
 * iemCImpl_load_SReg C implementation.
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    /** @todo r=bird: What does 8086 do here wrt CS? */
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     *
     * Note! Using IEMOP_MOV_SW_EV_REG_BODY here to specify different
     *       IEM_CIMPL_F_XXX values depending on the CPU mode and target
     *       register. This is a restriction of the current recompiler
     *       approach.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The RT_BIT_64 mask passed to IEM_MC_CALL_CIMPL_2 lists the guest
           shadow registers (selector, base, limit, attributes of iSegReg)
           that the C implementation may modify. */
#define IEMOP_MOV_SW_EV_REG_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 0, 0, a_fCImplFlags); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
                IEM_MC_ARG(uint16_t,      u16Value,          1); \
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                      RT_BIT_64(kIemNativeGstReg_SegSelFirst    + iSegReg) \
                                    | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + iSegReg) \
                                    | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + iSegReg) \
                                    | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
                                    iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        if (iSegReg == X86_SREG_SS)
        {
            /* Loading SS inhibits interrupts/single-stepping for one
               instruction; in 32-bit code it can also change the mode. */
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_REG_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_REG_BODY
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#define IEMOP_MOV_SW_EV_MEM_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 1, 0, a_fCImplFlags); \
                IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
                IEM_MC_ARG(uint16_t,      u16Value,          1); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                      RT_BIT_64(kIemNativeGstReg_SegSelFirst    + iSegReg) \
                                    | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + iSegReg) \
                                    | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + iSegReg) \
                                    | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
                                    iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        if (iSegReg == X86_SREG_SS)
        {
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_MEM_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_MEM_BODY
    }
}
6133
6134
/** Opcode 0x8f /0.
 *
 * pop Ev - pops a value off the stack into the register or memory operand.
 * For the memory form Intel specifies that rSP is incremented *before* the
 * effective address is calculated, which is handled by passing the stack
 * increment to the effective-address calculation (second byte of the
 * cbImmAndRspOffset argument below). */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* 'N << 8' = apply an N byte RSP bias during the effective
               address calculation (no immediate bytes in the low byte). */
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
6266
6267
/**
 * @opcode 0x8f
 *
 * Dispatches between 'pop Ev' (modrm.reg == 0) and the AMD XOP prefix
 * (modrm.reg != 0).  For XOP the second prefix byte is decoded into the
 * REX/VEX-style prefix state before dispatching on the opcode map.
 */
FNIEMOP_DEF(iemOp_Grp1A__xop)
{
    /*
     * AMD has defined /1 thru /7 as XOP prefix.  The prefix is similar to the
     * three byte VEX prefix, except that the mmmmm field cannot have the values
     * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    IEMOP_MNEMONIC(xop, "xop");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
    {
        /** @todo Test when exctly the XOP conformance checks kick in during
         * instruction decoding and fetching (using \#PF). */
        uint8_t bXop2;   IEM_OPCODE_GET_NEXT_U8(&bXop2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        /* XOP may not be combined with legacy prefixes or REX. */
        if (   (  pVCpu->iem.s.fPrefixes
                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0)
        {
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
            if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.uRexReg = 0; /* this line intentionally not present - see below */
            /* The R, X and B payload bits are stored inverted in the prefix
               byte, hence the '~' when extracting them: */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bXop2 & 0x3;

            /** @todo XOP: Just use new tables and decoders. */
            switch (bRm & 0x1f)
            {
                case 8: /* xop opcode map 8. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 9: /* xop opcode map 9. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 10: /* xop opcode map 10. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default:
                    Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        else
            Log(("XOP: Invalid prefix mix!\n"));
    }
    else
        Log(("XOP: XOP support disabled!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
6330
6331
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Swaps the general register @a iReg (after applying REX.B) with rAX at the
 * current effective operand size.  Used by opcodes 0x90 thru 0x97.
 *
 * @param   iReg    The low 3 bits of the register number (REX.B is OR'ed in).
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    iReg |= pVCpu->iem.s.uRexB;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg,         u16Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg,         u32Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg,         u64Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6382
6383
/**
 * @opcode 0x90
 *
 * nop - also 'xchg r8,rAX' when prefixed with REX.B, and 'pause' when
 * prefixed with F3 (decoded as IEM_OP_PRF_LOCK here); pause may trigger a
 * VM-exit under nested VMX/SVM.
 */
FNIEMOP_DEF(iemOp_nop)
{
    /* R8/R8D and RAX/EAX can be exchanged. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
    {
        IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
        return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        IEMOP_MNEMONIC(pause, "pause");
        /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
           and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
        if (!IEM_IS_IN_GUEST(pVCpu))
        { /* probable */ }
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
        else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmx_pause);
#endif
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
        else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_svm_pause);
#endif
    }
    else
        IEMOP_MNEMONIC(nop, "nop");
    /** @todo testcase: lock nop; lock pause */
    /* Plain nop / non-exiting pause: just advance RIP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6420
6421
/**
 * @opcode 0x91
 * xchg rCX,rAX - defers to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
6430
6431
/**
 * @opcode 0x92
 * xchg rDX,rAX - defers to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
6440
6441
/**
 * @opcode 0x93
 * xchg rBX,rAX - defers to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
6450
6451
6452/**
6453 * @opcode 0x94
6454 */
6455FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6456{
6457 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6458 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6459}
6460
6461
/**
 * @opcode 0x95
 * xchg rBP,rAX - defers to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
6470
6471
/**
 * @opcode 0x96
 * xchg rSI,rAX - defers to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
6480
6481
/**
 * @opcode 0x97
 * xchg rDI,rAX - defers to the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
6490
6491
/**
 * @opcode 0x98
 *
 * cbw/cwde/cdqe - sign-extends the lower half of rAX into the upper half,
 * selected by the effective operand size.  Implemented by testing the sign
 * bit of the source width and OR-ing/AND-ing the upper half accordingly.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* cbw: AL -> AX. */
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            /* cwde: AX -> EAX. */
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            /* cdqe: EAX -> RAX. */
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6541
6542
/**
 * @opcode 0x99
 *
 * cwd/cdq/cqo - sign-extends rAX into rDX:rAX, selected by the effective
 * operand size: rDX is set to all ones when the sign bit of rAX is set and
 * to zero otherwise.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* cwd: AX -> DX:AX. */
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            /* cdq: EAX -> EDX:EAX. */
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            /* cqo: RAX -> RDX:RAX. */
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6592
6593
/**
 * @opcode 0x9a
 *
 * call Ap - direct far call with an immediate selector:offset pointer.
 * Invalid in 64-bit mode.  The heavy lifting (stack push, mode/privilege
 * checks) is done by the iemCImpl_callf C implementation.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    /* Offset comes first in the instruction stream, then the selector. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
                                iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
6615
6616
/** Opcode 0x9b. (aka fwait)
 *
 * wait/fwait - checks for pending x87 FPU exceptions (and device-not-available
 * conditions) before continuing; otherwise a no-op. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6628
6629
/**
 * @opcode 0x9c
 *
 * pushf Fv - pushes the flags register onto the stack; deferred to the
 * iemCImpl_pushf C implementation (may VM-exit, modifies rSP).
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
                                iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
6641
6642
/**
 * @opcode 0x9d
 *
 * popf Fv - pops the flags register off the stack; deferred to the
 * iemCImpl_popf C implementation.  Since it can unmask interrupts, IRQs are
 * checked both before and after the instruction.
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_MNEMONIC(popf_Fv, "popf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
                                iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
6655
6656
/**
 * @opcode 0x9e
 * @opflmodify cf,pf,af,zf,sf
 *
 * sahf - stores AH into the low byte of EFLAGS (SF, ZF, AF, PF, CF; the
 * reserved bit 1 is forced set).  Raises \#UD in 64-bit mode unless the CPU
 * reports LAHF/SAHF support.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Keep only the flag bits from AH, clear the low byte of EFLAGS, set
       the always-one bit, then merge. */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6681
6682
/**
 * @opcode 0x9f
 * @opfltest cf,pf,af,zf,sf
 *
 * lahf - loads the low byte of EFLAGS into AH.  Raises \#UD in 64-bit mode
 * unless the CPU reports LAHF/SAHF support.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6701
6702
/**
 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
 * Will return/throw on failures.
 *
 * The fetched offset is zero extended to 64 bits when the effective address
 * size is 16 or 32 bits.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } while (0)
6726
/**
 * @opcode 0xa0
 *
 * mov AL,Ob - loads AL from the memory byte at the immediate moffs offset
 * within the effective segment.
 */
FNIEMOP_DEF(iemOp_mov_AL_Ob)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6751
6752
/**
 * @opcode 0xa1
 *
 * mov rAX,Ov - loads rAX (at the effective operand size) from the memory
 * location at the immediate moffs offset within the effective segment.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6806
6807
/**
 * @opcode 0xa2
 *
 * mov Ob,AL - stores AL to the memory byte at the immediate moffs offset
 * within the effective segment.
 */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
    IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6832
6833
/**
 * @opcode 0xa3
 *
 * MOV Ov,rAX: store AX/EAX/RAX to memory addressed by a direct moffs
 * displacement (no ModR/M byte), using the effective segment.
 */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Store rAX.  One microcode block per effective operand size.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6887
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits the non-REP MOVS microcode for one value-width / address-width
 * combination: load from effective-segment:rSI, store to ES:rDI, then step
 * both index registers by the operand size - down if EFLAGS.DF is set, up
 * otherwise.
 */
#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { /* DF=1: walk downwards */ \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6907
/**
 * @opcode 0xa4
 * @opfltest df
 *
 * MOVSB: with a REP/REPNE prefix the whole loop is deferred to the C
 * implementation (which clobbers rSI, rDI and rCX); otherwise a single byte
 * is moved via IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6957
6958
/**
 * @opcode 0xa5
 * @opfltest df
 *
 * MOVSW/MOVSD/MOVSQ: a double switch over effective operand size and
 * effective address size.  With a REP/REPNE prefix everything is deferred to
 * the matching C implementation (clobbers rSI, rDI, rCX); otherwise one
 * element is moved via IEM_MOVS_CASE.  Note: each IEM_MC_DEFER_TO_CIMPL_1_RET
 * returns, so the inner switches never fall out.
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT: /* no break above: unreachable, all inner cases return */
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* 64-bit op + 16-bit addr cannot be encoded */
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7083
7084#undef IEM_MOVS_CASE
7085
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits the non-REP CMPS microcode for one value-width / address-width
 * combination: fetch from effective-segment:rSI and ES:rDI, compare the two
 * via the cmp assembly helper (only EFLAGS are architecturally affected),
 * then step both index registers by the operand size per EFLAGS.DF.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(3, 3, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        \
        IEM_MC_LOCAL(RTGCPTR, uAddr1); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr1, X86_GREG_xSI); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr1); \
        \
        IEM_MC_LOCAL(RTGCPTR, uAddr2); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr2, X86_GREG_xDI); \
        IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr2); \
        \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_ARG_LOCAL_REF(uint##ValBits##_t *, puValue1, uValue1, 0); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { /* DF=1: walk downwards */ \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
7115
/**
 * @opcode 0xa6
 * @opflclass arithmetic
 * @opfltest df
 *
 * CMPSB: REPE and REPNE prefixes select the respective C implementations
 * (which clobber rSI, rDI, rCX and the status flags); without a prefix a
 * single byte comparison is emitted via IEM_CMPS_CASE.
 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7194
7195
7196/**
7197 * @opcode 0xa7
7198 * @opflclass arithmetic
7199 * @opfltest df
7200 */
7201FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
7202{
7203 /*
7204 * Use the C implementation if a repeat prefix is encountered.
7205 */
7206 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7207 {
7208 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
7209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7210 switch (pVCpu->iem.s.enmEffOpSize)
7211 {
7212 case IEMMODE_16BIT:
7213 switch (pVCpu->iem.s.enmEffAddrMode)
7214 {
7215 case IEMMODE_16BIT:
7216 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7217 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7218 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7219 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7220 iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7221 case IEMMODE_32BIT:
7222 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7223 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7224 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7225 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7226 iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7227 case IEMMODE_64BIT:
7228 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7229 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7230 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7231 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7232 iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7233 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7234 }
7235 break;
7236 case IEMMODE_32BIT:
7237 switch (pVCpu->iem.s.enmEffAddrMode)
7238 {
7239 case IEMMODE_16BIT:
7240 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7241 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7242 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7243 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7244 iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7245 case IEMMODE_32BIT:
7246 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7247 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7248 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7249 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7250 iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7251 case IEMMODE_64BIT:
7252 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7253 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7254 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7255 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7256 iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7257 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7258 }
7259 case IEMMODE_64BIT:
7260 switch (pVCpu->iem.s.enmEffAddrMode)
7261 {
7262 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
7263 case IEMMODE_32BIT:
7264 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7265 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7266 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7267 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7268 iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7269 case IEMMODE_64BIT:
7270 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7271 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7272 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7273 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7274 iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7275 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7276 }
7277 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7278 }
7279 }
7280
7281 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7282 {
7283 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
7284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7285 switch (pVCpu->iem.s.enmEffOpSize)
7286 {
7287 case IEMMODE_16BIT:
7288 switch (pVCpu->iem.s.enmEffAddrMode)
7289 {
7290 case IEMMODE_16BIT:
7291 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7292 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7293 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7294 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7295 iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7296 case IEMMODE_32BIT:
7297 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7298 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7299 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7300 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7301 iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7302 case IEMMODE_64BIT:
7303 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7304 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7305 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7306 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7307 iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7308 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7309 }
7310 break;
7311 case IEMMODE_32BIT:
7312 switch (pVCpu->iem.s.enmEffAddrMode)
7313 {
7314 case IEMMODE_16BIT:
7315 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7316 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7317 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7318 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7319 iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7320 case IEMMODE_32BIT:
7321 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7322 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7323 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7324 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7325 iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7326 case IEMMODE_64BIT:
7327 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7328 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7329 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7330 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7331 iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7332 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7333 }
7334 case IEMMODE_64BIT:
7335 switch (pVCpu->iem.s.enmEffAddrMode)
7336 {
7337 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
7338 case IEMMODE_32BIT:
7339 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7340 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7341 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7342 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7343 iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7344 case IEMMODE_64BIT:
7345 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7346 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7347 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7348 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7349 iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7350 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7351 }
7352 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7353 }
7354 }
7355
7356 /*
7357 * Annoying double switch here.
7358 * Using ugly macro for implementing the cases, sharing it with cmpsb.
7359 */
7360 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
7361 switch (pVCpu->iem.s.enmEffOpSize)
7362 {
7363 case IEMMODE_16BIT:
7364 switch (pVCpu->iem.s.enmEffAddrMode)
7365 {
7366 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7367 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7368 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
7369 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7370 }
7371 break;
7372
7373 case IEMMODE_32BIT:
7374 switch (pVCpu->iem.s.enmEffAddrMode)
7375 {
7376 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7377 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7378 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
7379 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7380 }
7381 break;
7382
7383 case IEMMODE_64BIT:
7384 switch (pVCpu->iem.s.enmEffAddrMode)
7385 {
7386 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7387 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_MIN_386); break;
7388 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
7389 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7390 }
7391 break;
7392 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7393 }
7394}
7395
7396#undef IEM_CMPS_CASE
7397
/**
 * @opcode 0xa8
 * @opflclass logical
 *
 * TEST AL,Ib - shares the common AL,imm8 binary-op decoder body; AF is
 * architecturally undefined for TEST, hence the verification annotation.
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
}
7408
7409
/**
 * @opcode 0xa9
 * @opflclass logical
 *
 * TEST rAX,Iz - shares the common rAX,immZ binary-op decoder body (16/32/64
 * bit helpers selected by operand size); AF is architecturally undefined.
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
}
7420
7421
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the non-REP STOS microcode for one value-width / address-width
 * combination: store AL/AX/EAX/RAX to ES:rDI, then step rDI by the operand
 * size - down if EFLAGS.DF is set, up otherwise.
 */
#define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { /* DF=1: walk downwards */ \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
7438
/**
 * @opcode 0xaa
 *
 * STOSB: with a REP/REPNE prefix the whole loop is deferred to the C
 * implementation (which clobbers rDI and rCX); otherwise a single byte store
 * is emitted via IEM_STOS_CASE.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_stos_al_m16);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_stos_al_m32);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7484
7485
7486/**
7487 * @opcode 0xab
7488 */
7489FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
7490{
7491 /*
7492 * Use the C implementation if a repeat prefix is encountered.
7493 */
7494 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7495 {
7496 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
7497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7498 switch (pVCpu->iem.s.enmEffOpSize)
7499 {
7500 case IEMMODE_16BIT:
7501 switch (pVCpu->iem.s.enmEffAddrMode)
7502 {
7503 case IEMMODE_16BIT:
7504 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7505 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7506 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7507 iemCImpl_stos_ax_m16);
7508 case IEMMODE_32BIT:
7509 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7510 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7511 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7512 iemCImpl_stos_ax_m32);
7513 case IEMMODE_64BIT:
7514 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7515 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7516 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7517 iemCImpl_stos_ax_m64);
7518 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7519 }
7520 break;
7521 case IEMMODE_32BIT:
7522 switch (pVCpu->iem.s.enmEffAddrMode)
7523 {
7524 case IEMMODE_16BIT:
7525 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7526 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7527 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7528 iemCImpl_stos_eax_m16);
7529 case IEMMODE_32BIT:
7530 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7531 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7532 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7533 iemCImpl_stos_eax_m32);
7534 case IEMMODE_64BIT:
7535 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7536 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7537 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7538 iemCImpl_stos_eax_m64);
7539 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7540 }
7541 case IEMMODE_64BIT:
7542 switch (pVCpu->iem.s.enmEffAddrMode)
7543 {
7544 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
7545 case IEMMODE_32BIT:
7546 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7547 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7548 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7549 iemCImpl_stos_rax_m32);
7550 case IEMMODE_64BIT:
7551 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7552 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7553 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7554 iemCImpl_stos_rax_m64);
7555 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7556 }
7557 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7558 }
7559 }
7560
7561 /*
7562 * Annoying double switch here.
7563 * Using ugly macro for implementing the cases, sharing it with stosb.
7564 */
7565 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7566 switch (pVCpu->iem.s.enmEffOpSize)
7567 {
7568 case IEMMODE_16BIT:
7569 switch (pVCpu->iem.s.enmEffAddrMode)
7570 {
7571 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7572 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7573 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
7574 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7575 }
7576 break;
7577
7578 case IEMMODE_32BIT:
7579 switch (pVCpu->iem.s.enmEffAddrMode)
7580 {
7581 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7582 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7583 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
7584 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7585 }
7586 break;
7587
7588 case IEMMODE_64BIT:
7589 switch (pVCpu->iem.s.enmEffAddrMode)
7590 {
7591 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7592 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
7593 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
7594 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7595 }
7596 break;
7597 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7598 }
7599}
7600
7601#undef IEM_STOS_CASE
7602
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the non-REP LODS microcode for one value-width / address-width
 * combination: load from effective-segment:rSI into AL/AX/EAX/RAX, then step
 * rSI by the operand size - down if EFLAGS.DF is set, up otherwise.
 */
#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { /* DF=1: walk downwards */ \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
7619
/**
 * @opcode 0xac
 * @opfltest df
 *
 * LODSB: with a REP/REPNE prefix the whole loop is deferred to the C
 * implementation (which clobbers rAX, rSI and rCX); otherwise a single byte
 * load is emitted via IEM_LODS_CASE.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7669
7670
/**
 * @opcode 0xad
 * @opfltest df
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* CImpl worker selected by operand size x address size.  Every inner
           case returns via the _RET macro, so the missing breaks between the
           inner switches and the next outer case cannot fall through. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7); /* 16-bit addressing not encodable in 64-bit mode */
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7794
7795#undef IEM_LODS_CASE
7796
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 * Compares AL/AX/EAX/RAX against ES:[xDI] (no segment override for scas)
 * and steps xDI up or down by the operand size according to EFLAGS.DF. */
#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); /* always ES:xDI */ \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
/**
 * @opcode 0xae
 * @opflclass arithmetic
 * @opfltest df
 */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     * REPE/REPZ (0xf3) and REPNE/REPNZ (0xf2) get distinct workers since the
     * loop termination condition differs.  Every case returns via _RET.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with scas[wdq] below (IEM_SCAS_CASE).
     */
    IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7891
7892
/**
 * @opcode 0xaf
 * @opflclass arithmetic
 * @opfltest df
 */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     * Worker selected by prefix (repe/repne) x operand size x address size;
     * every inner case returns via the _RET macro, so the missing breaks
     * between inner switches and the next outer case cannot fall through.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Is this wrong?  64-bit mode allows 32-bit addressing (0x67 prefix) but not 16-bit -- verify. */
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5); /* 16-bit addressing not encodable in 64-bit mode */
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8075
8076#undef IEM_SCAS_CASE
8077
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the imm8 operand and stores it into the fixed 8-bit register.
 * The caller has already folded any REX.B extension into @a iFixedReg.
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_STORE_GREG_U8_CONST(iFixedReg, u8Imm);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8090
8091
8092/**
8093 * @opcode 0xb0
8094 */
8095FNIEMOP_DEF(iemOp_mov_AL_Ib)
8096{
8097 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
8098 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8099}
8100
8101
8102/**
8103 * @opcode 0xb1
8104 */
8105FNIEMOP_DEF(iemOp_CL_Ib)
8106{
8107 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
8108 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8109}
8110
8111
8112/**
8113 * @opcode 0xb2
8114 */
8115FNIEMOP_DEF(iemOp_DL_Ib)
8116{
8117 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
8118 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8119}
8120
8121
8122/**
8123 * @opcode 0xb3
8124 */
8125FNIEMOP_DEF(iemOp_BL_Ib)
8126{
8127 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
8128 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8129}
8130
8131
8132/**
8133 * @opcode 0xb4
8134 */
8135FNIEMOP_DEF(iemOp_mov_AH_Ib)
8136{
8137 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
8138 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8139}
8140
8141
8142/**
8143 * @opcode 0xb5
8144 */
8145FNIEMOP_DEF(iemOp_CH_Ib)
8146{
8147 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
8148 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8149}
8150
8151
8152/**
8153 * @opcode 0xb6
8154 */
8155FNIEMOP_DEF(iemOp_DH_Ib)
8156{
8157 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
8158 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8159}
8160
8161
8162/**
8163 * @opcode 0xb7
8164 */
8165FNIEMOP_DEF(iemOp_BH_Ib)
8166{
8167 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
8168 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8169}
8170
8171
/**
 * Common 'mov regX,immX' helper.
 *
 * Fetches an immediate sized by the effective operand size (imm16/imm32, or a
 * full imm64 in 64-bit operand mode) and stores it into the fixed register.
 * The caller has already folded any REX.B extension into @a iFixedReg.
 */
FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U16_CONST(iFixedReg, u16Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U32_CONST(iFixedReg, u32Imm); /* implicitly zero-extends to 64-bit */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64_CONST(iFixedReg, u64Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8208
8209
8210/**
8211 * @opcode 0xb8
8212 */
8213FNIEMOP_DEF(iemOp_eAX_Iv)
8214{
8215 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
8216 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8217}
8218
8219
8220/**
8221 * @opcode 0xb9
8222 */
8223FNIEMOP_DEF(iemOp_eCX_Iv)
8224{
8225 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
8226 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8227}
8228
8229
8230/**
8231 * @opcode 0xba
8232 */
8233FNIEMOP_DEF(iemOp_eDX_Iv)
8234{
8235 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
8236 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8237}
8238
8239
8240/**
8241 * @opcode 0xbb
8242 */
8243FNIEMOP_DEF(iemOp_eBX_Iv)
8244{
8245 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
8246 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8247}
8248
8249
8250/**
8251 * @opcode 0xbc
8252 */
8253FNIEMOP_DEF(iemOp_eSP_Iv)
8254{
8255 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
8256 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8257}
8258
8259
8260/**
8261 * @opcode 0xbd
8262 */
8263FNIEMOP_DEF(iemOp_eBP_Iv)
8264{
8265 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
8266 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8267}
8268
8269
8270/**
8271 * @opcode 0xbe
8272 */
8273FNIEMOP_DEF(iemOp_eSI_Iv)
8274{
8275 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
8276 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8277}
8278
8279
8280/**
8281 * @opcode 0xbf
8282 */
8283FNIEMOP_DEF(iemOp_eDI_Iv)
8284{
8285 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
8286 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8287}
8288
8289
/**
 * @opcode 0xc0
 *
 * Group 2 Eb,Ib: shift/rotate r/m8 by an imm8 count, selected by the ModR/M
 * reg field.  Requires a 186 or later (imm8 shift counts were new there).
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Need to use a body macro here since the EFLAGS behaviour differs between
       the shifts, rotates and rotate w/ carry. Sigh. */
#define GRP2_BODY_Eb_Ib(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory */ \
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_186, 0); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = imm8 still to be fetched */ \
        \
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        \
        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        \
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
        IEM_MC_FETCH_EFLAGS(EFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        \
        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
        IEM_MC_COMMIT_EFLAGS(EFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } (void)0

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /0
         * @opflclass rotate_count
         */
        case 0:
        {
            IEMOP_MNEMONIC2(MI, ROL, rol, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /1
         * @opflclass rotate_count
         */
        case 1:
        {
            IEMOP_MNEMONIC2(MI, ROR, ror, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /2
         * @opflclass rotate_carry_count
         */
        case 2:
        {
            IEMOP_MNEMONIC2(MI, RCL, rcl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /3
         * @opflclass rotate_carry_count
         */
        case 3:
        {
            IEMOP_MNEMONIC2(MI, RCR, rcr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /4
         * @opflclass shift_count
         */
        case 4:
        {
            IEMOP_MNEMONIC2(MI, SHL, shl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /5
         * @opflclass shift_count
         */
        case 5:
        {
            IEMOP_MNEMONIC2(MI, SHR, shr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /7
         * @opflclass shift_count
         */
        case 7:
        {
            IEMOP_MNEMONIC2(MI, SAR, sar, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
            break;
        }

        /** @opdone */
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is undefined in group 2 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
#undef GRP2_BODY_Eb_Ib
}
8442
8443
/* Need to use a body macro here since the EFLAGS behaviour differs between
   the shifts, rotates and rotate w/ carry.  a_pImplExpr selects the
   IEMOPSHIFTSIZES worker table for the specific instruction; bRm is expected
   in scope at the expansion site.  Sigh. */
#define GRP2_BODY_Ev_Ib(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); /* 32-bit result zero-extends */ \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 3, 0, 0); /* NOTE(review): no IEM_MC_F_MIN_186 here unlike the register path -- intentional? */ \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
8574
/**
 * @opmaps grp2_c1
 * @opcode /0
 * @opflclass rotate_count
 */
FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, ROL, rol, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    /* Shared Ev,Ib body; the expression picks the host-behaviour specific rol worker table. */
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
}
8585
8586
/**
 * @opmaps grp2_c1
 * @opcode /1
 * @opflclass rotate_count
 */
FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, ROR, ror, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    /* Shared Ev,Ib body; the expression picks the host-behaviour specific ror worker table. */
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
}
8597
8598
/**
 * @opmaps grp2_c1
 * @opcode /2
 * @opflclass rotate_carry_count
 */
FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, RCL, rcl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    /* Shared Ev,Ib body; the expression picks the host-behaviour specific rcl worker table. */
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
}
8609
8610
/**
 * @opmaps grp2_c1
 * @opcode /3
 * @opflclass rotate_carry_count
 */
FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, RCR, rcr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    /* Shared Ev,Ib body; the expression picks the host-behaviour specific rcr worker table. */
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
}
8621
8622
/**
 * @opmaps grp2_c1
 * @opcode /4
 * @opflclass shift_count
 */
FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, SHL, shl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    /* Shared Ev,Ib body; the expression picks the host-behaviour specific shl worker table. */
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
}
8633
8634
8635/**
8636 * @opmaps grp2_c1
8637 * @opcode /5
8638 * @opflclass shift_count
8639 */
8640FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_Ib, uint8_t, bRm)
8641{
8642 IEMOP_MNEMONIC2(MI, SHR, shr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8643 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
8644}
8645
8646
8647/**
8648 * @opmaps grp2_c1
8649 * @opcode /7
8650 * @opflclass shift_count
8651 */
8652FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_Ib, uint8_t, bRm)
8653{
8654 IEMOP_MNEMONIC2(MI, SAR, sar, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8655 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
8656}
8657
8658#undef GRP2_BODY_Ev_Ib
8659
8660/**
8661 * @opcode 0xc1
8662 */
8663FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
8664{
8665 IEMOP_HLP_MIN_186();
8666 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8667
8668 switch (IEM_GET_MODRM_REG_8(bRm))
8669 {
8670 case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_Ib, bRm);
8671 case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_Ib, bRm);
8672 case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_Ib, bRm);
8673 case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_Ib, bRm);
8674 case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_Ib, bRm);
8675 case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_Ib, bRm);
8676 case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_Ib, bRm);
8677 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8678 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8679 }
8680}
8681
8682
8683/**
8684 * @opcode 0xc2
8685 */
8686FNIEMOP_DEF(iemOp_retn_Iw)
8687{
8688 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
8689 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8690 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8692 switch (pVCpu->iem.s.enmEffOpSize)
8693 {
8694 case IEMMODE_16BIT:
8695 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8696 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_16, u16Imm);
8697 case IEMMODE_32BIT:
8698 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8699 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_32, u16Imm);
8700 case IEMMODE_64BIT:
8701 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8702 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_64, u16Imm);
8703 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8704 }
8705}
8706
8707
8708/**
8709 * @opcode 0xc3
8710 */
8711FNIEMOP_DEF(iemOp_retn)
8712{
8713 IEMOP_MNEMONIC(retn, "retn");
8714 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8716 switch (pVCpu->iem.s.enmEffOpSize)
8717 {
8718 case IEMMODE_16BIT:
8719 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8720 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_16);
8721 case IEMMODE_32BIT:
8722 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8723 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_32);
8724 case IEMMODE_64BIT:
8725 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8726 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_64);
8727 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8728 }
8729}
8730
8731
8732/**
8733 * @opcode 0xc4
8734 */
8735FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
8736{
8737 /* The LDS instruction is invalid 64-bit mode. In legacy and
8738 compatability mode it is invalid with MOD=3.
8739 The use as a VEX prefix is made possible by assigning the inverted
8740 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
8741 outside of 64-bit mode. VEX is not available in real or v86 mode. */
8742 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8743 if ( IEM_IS_64BIT_CODE(pVCpu)
8744 || IEM_IS_MODRM_REG_MODE(bRm) )
8745 {
8746 IEMOP_MNEMONIC(vex3_prefix, "vex3");
8747 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
8748 {
8749 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
8750 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
8751 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
8752 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
8753 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
8754 if (IEM_IS_64BIT_CODE(pVCpu))
8755 {
8756#if 1
8757 AssertCompile(IEM_OP_PRF_SIZE_REX_W == RT_BIT_32(9));
8758 pVCpu->iem.s.fPrefixes |= (uint32_t)(bVex2 & 0x80) << (9 - 7);
8759 AssertCompile(IEM_OP_PRF_REX_B == RT_BIT_32(25) && IEM_OP_PRF_REX_X == RT_BIT_32(26) && IEM_OP_PRF_REX_R == RT_BIT_32(27));
8760 pVCpu->iem.s.fPrefixes |= (uint32_t)(~bRm & 0xe0) << (25 - 5);
8761#else
8762 if (bVex2 & 0x80 /* VEX.W */)
8763 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
8764 if (~bRm & 0x20 /* VEX.~B */)
8765 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_B;
8766 if (~bRm & 0x40 /* VEX.~X */)
8767 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_X;
8768 if (~bRm & 0x80 /* VEX.~R */)
8769 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_R;
8770#endif
8771 }
8772 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
8773 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
8774 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
8775 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
8776 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
8777 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
8778
8779 switch (bRm & 0x1f)
8780 {
8781 case 1: /* 0x0f lead opcode byte. */
8782#ifdef IEM_WITH_VEX
8783 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8784#else
8785 IEMOP_BITCH_ABOUT_STUB();
8786 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8787#endif
8788
8789 case 2: /* 0x0f 0x38 lead opcode bytes. */
8790#ifdef IEM_WITH_VEX
8791 return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8792#else
8793 IEMOP_BITCH_ABOUT_STUB();
8794 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8795#endif
8796
8797 case 3: /* 0x0f 0x3a lead opcode bytes. */
8798#ifdef IEM_WITH_VEX
8799 return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8800#else
8801 IEMOP_BITCH_ABOUT_STUB();
8802 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8803#endif
8804
8805 default:
8806 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
8807 IEMOP_RAISE_INVALID_OPCODE_RET();
8808 }
8809 }
8810 Log(("VEX3: VEX support disabled!\n"));
8811 IEMOP_RAISE_INVALID_OPCODE_RET();
8812 }
8813
8814 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
8815 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
8816}
8817
8818
8819/**
8820 * @opcode 0xc5
8821 */
8822FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
8823{
8824 /* The LES instruction is invalid 64-bit mode. In legacy and
8825 compatability mode it is invalid with MOD=3.
8826 The use as a VEX prefix is made possible by assigning the inverted
8827 REX.R to the top MOD bit, and the top bit in the inverted register
8828 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
8829 to accessing registers 0..7 in this VEX form. */
8830 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8831 if ( IEM_IS_64BIT_CODE(pVCpu)
8832 || IEM_IS_MODRM_REG_MODE(bRm))
8833 {
8834 IEMOP_MNEMONIC(vex2_prefix, "vex2");
8835 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
8836 {
8837 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
8838 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
8839 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
8840 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
8841 AssertCompile(IEM_OP_PRF_REX_R == RT_BIT_32(27));
8842 pVCpu->iem.s.fPrefixes |= (uint32_t)(~bRm & 0x80) << (27 - 7);
8843 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
8844 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
8845 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
8846 pVCpu->iem.s.idxPrefix = bRm & 0x3;
8847
8848#ifdef IEM_WITH_VEX
8849 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8850#else
8851 IEMOP_BITCH_ABOUT_STUB();
8852 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8853#endif
8854 }
8855
8856 /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
8857 Log(("VEX2: VEX support disabled!\n"));
8858 IEMOP_RAISE_INVALID_OPCODE_RET();
8859 }
8860
8861 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
8862 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
8863}
8864
8865
8866/**
8867 * @opcode 0xc6
8868 */
8869FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
8870{
8871 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8872 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
8873 IEMOP_RAISE_INVALID_OPCODE_RET();
8874 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
8875
8876 if (IEM_IS_MODRM_REG_MODE(bRm))
8877 {
8878 /* register access */
8879 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8880 IEM_MC_BEGIN(0, 0, 0, 0);
8881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8882 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
8883 IEM_MC_ADVANCE_RIP_AND_FINISH();
8884 IEM_MC_END();
8885 }
8886 else
8887 {
8888 /* memory access. */
8889 IEM_MC_BEGIN(0, 1, 0, 0);
8890 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8891 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8892 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8894 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
8895 IEM_MC_ADVANCE_RIP_AND_FINISH();
8896 IEM_MC_END();
8897 }
8898}
8899
8900
8901/**
8902 * @opcode 0xc7
8903 */
8904FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
8905{
8906 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8907 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Iz in this group. */
8908 IEMOP_RAISE_INVALID_OPCODE_RET();
8909 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
8910
8911 if (IEM_IS_MODRM_REG_MODE(bRm))
8912 {
8913 /* register access */
8914 switch (pVCpu->iem.s.enmEffOpSize)
8915 {
8916 case IEMMODE_16BIT:
8917 IEM_MC_BEGIN(0, 0, 0, 0);
8918 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8920 IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
8921 IEM_MC_ADVANCE_RIP_AND_FINISH();
8922 IEM_MC_END();
8923 break;
8924
8925 case IEMMODE_32BIT:
8926 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8927 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8929 IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
8930 IEM_MC_ADVANCE_RIP_AND_FINISH();
8931 IEM_MC_END();
8932 break;
8933
8934 case IEMMODE_64BIT:
8935 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
8936 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8938 IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
8939 IEM_MC_ADVANCE_RIP_AND_FINISH();
8940 IEM_MC_END();
8941 break;
8942
8943 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8944 }
8945 }
8946 else
8947 {
8948 /* memory access. */
8949 switch (pVCpu->iem.s.enmEffOpSize)
8950 {
8951 case IEMMODE_16BIT:
8952 IEM_MC_BEGIN(0, 1, 0, 0);
8953 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8954 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
8955 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8957 IEM_MC_STORE_MEM_U16_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
8958 IEM_MC_ADVANCE_RIP_AND_FINISH();
8959 IEM_MC_END();
8960 break;
8961
8962 case IEMMODE_32BIT:
8963 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8964 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8965 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8966 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8968 IEM_MC_STORE_MEM_U32_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
8969 IEM_MC_ADVANCE_RIP_AND_FINISH();
8970 IEM_MC_END();
8971 break;
8972
8973 case IEMMODE_64BIT:
8974 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
8975 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8976 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8977 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8978 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8979 IEM_MC_STORE_MEM_U64_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
8980 IEM_MC_ADVANCE_RIP_AND_FINISH();
8981 IEM_MC_END();
8982 break;
8983
8984 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8985 }
8986 }
8987}
8988
8989
8990
8991
8992/**
8993 * @opcode 0xc8
8994 */
8995FNIEMOP_DEF(iemOp_enter_Iw_Ib)
8996{
8997 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
8998 IEMOP_HLP_MIN_186();
8999 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9000 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
9001 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
9002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9003 IEM_MC_DEFER_TO_CIMPL_3_RET(0,
9004 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9005 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
9006 iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
9007}
9008
9009
9010/**
9011 * @opcode 0xc9
9012 */
9013FNIEMOP_DEF(iemOp_leave)
9014{
9015 IEMOP_MNEMONIC(leave, "leave");
9016 IEMOP_HLP_MIN_186();
9017 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9019 IEM_MC_DEFER_TO_CIMPL_1_RET(0,
9020 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9021 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
9022 iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
9023}
9024
9025
9026/**
9027 * @opcode 0xca
9028 */
9029FNIEMOP_DEF(iemOp_retf_Iw)
9030{
9031 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
9032 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9034 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
9035 | IEM_CIMPL_F_MODE,
9036 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9037 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
9038 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
9039 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
9040 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9041 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
9042 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
9043 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
9044 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9045 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
9046 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
9047 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
9048 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9049 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
9050 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
9051 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
9052 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9053 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
9054}
9055
9056
9057/**
9058 * @opcode 0xcb
9059 */
9060FNIEMOP_DEF(iemOp_retf)
9061{
9062 IEMOP_MNEMONIC(retf, "retf");
9063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9064 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
9065 | IEM_CIMPL_F_MODE,
9066 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9067 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
9068 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
9069 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
9070 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9071 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
9072 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
9073 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
9074 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9075 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
9076 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
9077 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
9078 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9079 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
9080 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
9081 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
9082 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9083 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
9084}
9085
9086
9087/**
9088 * @opcode 0xcc
9089 */
9090FNIEMOP_DEF(iemOp_int3)
9091{
9092 IEMOP_MNEMONIC(int3, "int3");
9093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9094 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9095 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
9096 iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
9097}
9098
9099
9100/**
9101 * @opcode 0xcd
9102 */
9103FNIEMOP_DEF(iemOp_int_Ib)
9104{
9105 IEMOP_MNEMONIC(int_Ib, "int Ib");
9106 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
9107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9108 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9109 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS, UINT64_MAX,
9110 iemCImpl_int, u8Int, IEMINT_INTN);
9111 /** @todo make task-switches, ring-switches, ++ return non-zero status */
9112}
9113
9114
9115/**
9116 * @opcode 0xce
9117 */
9118FNIEMOP_DEF(iemOp_into)
9119{
9120 IEMOP_MNEMONIC(into, "into");
9121 IEMOP_HLP_NO_64BIT();
9122 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9123 | IEM_CIMPL_F_BRANCH_CONDITIONAL | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
9124 UINT64_MAX,
9125 iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
9126 /** @todo make task-switches, ring-switches, ++ return non-zero status */
9127}
9128
9129
9130/**
9131 * @opcode 0xcf
9132 */
9133FNIEMOP_DEF(iemOp_iret)
9134{
9135 IEMOP_MNEMONIC(iret, "iret");
9136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9137 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9138 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
9139 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9140 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
9141 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
9142 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
9143 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
9144 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
9145 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
9146 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
9147 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
9148 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
9149 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
9150 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
9151 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
9152 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9153 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9154 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9155 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9156 iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
9157 /* Segment registers are sanitized when returning to an outer ring, or fully
9158 reloaded when returning to v86 mode. Thus the large flush list above. */
9159}
9160
9161
9162/**
9163 * @opcode 0xd0
9164 */
9165FNIEMOP_DEF(iemOp_Grp2_Eb_1)
9166{
9167 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9168
9169 /* Need to use a body macro here since the EFLAGS behaviour differs between
9170 the shifts, rotates and rotate w/ carry. Sigh. */
9171#define GRP2_BODY_Eb_1(a_pImplExpr) \
9172 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
9173 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9174 { \
9175 /* register */ \
9176 IEM_MC_BEGIN(3, 0, 0, 0); \
9177 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9178 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
9179 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1); \
9180 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9181 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9182 IEM_MC_REF_EFLAGS(pEFlags); \
9183 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
9184 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9185 IEM_MC_END(); \
9186 } \
9187 else \
9188 { \
9189 /* memory */ \
9190 IEM_MC_BEGIN(3, 3, 0, 0); \
9191 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
9192 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1); \
9193 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9194 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9195 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9196 \
9197 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9198 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9199 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9200 IEM_MC_FETCH_EFLAGS(EFlags); \
9201 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
9202 \
9203 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9204 IEM_MC_COMMIT_EFLAGS(EFlags); \
9205 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9206 IEM_MC_END(); \
9207 } (void)0
9208
9209 switch (IEM_GET_MODRM_REG_8(bRm))
9210 {
9211 /**
9212 * @opdone
9213 * @opmaps grp2_d0
9214 * @opcode /0
9215 * @opflclass rotate_1
9216 */
9217 case 0:
9218 {
9219 IEMOP_MNEMONIC2(M1, ROL, rol, Eb, 1, DISOPTYPE_HARMLESS, 0);
9220 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9221 break;
9222 }
9223 /**
9224 * @opdone
9225 * @opmaps grp2_d0
9226 * @opcode /1
9227 * @opflclass rotate_1
9228 */
9229 case 1:
9230 {
9231 IEMOP_MNEMONIC2(M1, ROR, ror, Eb, 1, DISOPTYPE_HARMLESS, 0);
9232 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9233 break;
9234 }
9235 /**
9236 * @opdone
9237 * @opmaps grp2_d0
9238 * @opcode /2
9239 * @opflclass rotate_carry_1
9240 */
9241 case 2:
9242 {
9243 IEMOP_MNEMONIC2(M1, RCL, rcl, Eb, 1, DISOPTYPE_HARMLESS, 0);
9244 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9245 break;
9246 }
9247 /**
9248 * @opdone
9249 * @opmaps grp2_d0
9250 * @opcode /3
9251 * @opflclass rotate_carry_1
9252 */
9253 case 3:
9254 {
9255 IEMOP_MNEMONIC2(M1, RCR, rcr, Eb, 1, DISOPTYPE_HARMLESS, 0);
9256 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9257 break;
9258 }
9259 /**
9260 * @opdone
9261 * @opmaps grp2_d0
9262 * @opcode /4
9263 * @opflclass shift_1
9264 */
9265 case 4:
9266 {
9267 IEMOP_MNEMONIC2(M1, SHL, shl, Eb, 1, DISOPTYPE_HARMLESS, 0);
9268 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9269 break;
9270 }
9271 /**
9272 * @opdone
9273 * @opmaps grp2_d0
9274 * @opcode /5
9275 * @opflclass shift_1
9276 */
9277 case 5:
9278 {
9279 IEMOP_MNEMONIC2(M1, SHR, shr, Eb, 1, DISOPTYPE_HARMLESS, 0);
9280 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
9281 break;
9282 }
9283 /**
9284 * @opdone
9285 * @opmaps grp2_d0
9286 * @opcode /7
9287 * @opflclass shift_1
9288 */
9289 case 7:
9290 {
9291 IEMOP_MNEMONIC2(M1, SAR, sar, Eb, 1, DISOPTYPE_HARMLESS, 0);
9292 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
9293 break;
9294 }
9295 /** @opdone */
9296 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9297 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
9298 }
9299#undef GRP2_BODY_Eb_1
9300}
9301
9302
/* Need to use a body macro here since the EFLAGS behaviour differs between
   the shifts, rotates and rotate w/ carry. Sigh.

   The macro expands both the register and the memory form of a group-2
   'Ev,1' instruction for the 16/32/64-bit operand sizes, using a constant
   shift count of 1 and the assembly worker table given by a_pImplExpr.
   It references bRm and pVCpu from the invoking function. */
#define GRP2_BODY_Ev_1(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 3, 0, 0); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
9423
9424/**
9425 * @opmaps grp2_d1
9426 * @opcode /0
9427 * @opflclass rotate_1
9428 */
9429FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_1, uint8_t, bRm)
9430{
9431 IEMOP_MNEMONIC2(M1, ROL, rol, Ev, 1, DISOPTYPE_HARMLESS, 0);
9432 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9433}
9434
9435
9436/**
9437 * @opmaps grp2_d1
9438 * @opcode /1
9439 * @opflclass rotate_1
9440 */
9441FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_1, uint8_t, bRm)
9442{
9443 IEMOP_MNEMONIC2(M1, ROR, ror, Ev, 1, DISOPTYPE_HARMLESS, 0);
9444 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9445}
9446
9447
9448/**
9449 * @opmaps grp2_d1
9450 * @opcode /2
9451 * @opflclass rotate_carry_1
9452 */
9453FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_1, uint8_t, bRm)
9454{
9455 IEMOP_MNEMONIC2(M1, RCL, rcl, Ev, 1, DISOPTYPE_HARMLESS, 0);
9456 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9457}
9458
9459
9460/**
9461 * @opmaps grp2_d1
9462 * @opcode /3
9463 * @opflclass rotate_carry_1
9464 */
9465FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_1, uint8_t, bRm)
9466{
9467 IEMOP_MNEMONIC2(M1, RCR, rcr, Ev, 1, DISOPTYPE_HARMLESS, 0);
9468 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9469}
9470
9471
9472/**
9473 * @opmaps grp2_d1
9474 * @opcode /4
9475 * @opflclass shift_1
9476 */
9477FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_1, uint8_t, bRm)
9478{
9479 IEMOP_MNEMONIC2(M1, SHL, shl, Ev, 1, DISOPTYPE_HARMLESS, 0);
9480 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9481}
9482
9483
9484/**
9485 * @opmaps grp2_d1
9486 * @opcode /5
9487 * @opflclass shift_1
9488 */
9489FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_1, uint8_t, bRm)
9490{
9491 IEMOP_MNEMONIC2(M1, SHR, shr, Ev, 1, DISOPTYPE_HARMLESS, 0);
9492 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
9493}
9494
9495
9496/**
9497 * @opmaps grp2_d1
9498 * @opcode /7
9499 * @opflclass shift_1
9500 */
9501FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_1, uint8_t, bRm)
9502{
9503 IEMOP_MNEMONIC2(M1, SAR, sar, Ev, 1, DISOPTYPE_HARMLESS, 0);
9504 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
9505}
9506
9507#undef GRP2_BODY_Ev_1
9508
9509/**
9510 * @opcode 0xd1
9511 */
9512FNIEMOP_DEF(iemOp_Grp2_Ev_1)
9513{
9514 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9515 switch (IEM_GET_MODRM_REG_8(bRm))
9516 {
9517 case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_1, bRm);
9518 case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_1, bRm);
9519 case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_1, bRm);
9520 case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_1, bRm);
9521 case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_1, bRm);
9522 case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_1, bRm);
9523 case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_1, bRm);
9524 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9525 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
9526 }
9527}
9528
9529
/**
 * @opcode 0xd2
 *
 * Group 2 Eb,CL: byte-sized shifts/rotates with the count taken from CL,
 * dispatched on the ModR/M reg field.  The shared body macro handles both
 * the register and memory operand forms; the EFLAGS worker table differs
 * per sub-instruction, hence the macro rather than a common helper.
 *
 * NOTE(review): the per-case @opmaps tags below all say grp2_d0 although
 * this is opcode 0xd2 - looks like a copy/paste from the 0xd0 cases;
 * confirm against the opcode-map generator before relying on them.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Need to use a body macro here since the EFLAGS behaviour differs between
       the shifts, rotates and rotate w/ carry. Sigh. */
#define GRP2_BODY_Eb_CL(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG(uint8_t, cShiftArg, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory */ \
        IEM_MC_BEGIN(3, 3, 0, 0); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG(uint8_t, cShiftArg, 1); \
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_FETCH_EFLAGS(EFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        \
        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
        IEM_MC_COMMIT_EFLAGS(EFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } (void)0

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /0
         * @opflclass rotate_count
         */
        case 0:
        {
            IEMOP_MNEMONIC2EX(rol_Eb_CL, "rol Eb,CL", M_CL, ROL, rol, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /1
         * @opflclass rotate_count
         */
        case 1:
        {
            IEMOP_MNEMONIC2EX(ror_Eb_CL, "ror Eb,CL", M_CL, ROR, ror, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /2
         * @opflclass rotate_carry_count
         */
        case 2:
        {
            IEMOP_MNEMONIC2EX(rcl_Eb_CL, "rcl Eb,CL", M_CL, RCL, rcl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /3
         * @opflclass rotate_carry_count
         */
        case 3:
        {
            IEMOP_MNEMONIC2EX(rcr_Eb_CL, "rcr Eb,CL", M_CL, RCR, rcr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /4
         * @opflclass shift_count
         */
        case 4:
        {
            IEMOP_MNEMONIC2EX(shl_Eb_CL, "shl Eb,CL", M_CL, SHL, shl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /5
         * @opflclass shift_count
         */
        case 5:
        {
            IEMOP_MNEMONIC2EX(shr_Eb_CL, "shr Eb,CL", M_CL, SHR, shr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /7
         * @opflclass shift_count
         */
        case 7:
        {
            IEMOP_MNEMONIC2EX(sar_Eb_CL, "sar Eb,CL", M_CL, SAR, sar, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
            break;
        }
        /** @opdone */
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is not assigned in group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
#undef GRP2_BODY_Eb_CL
}
9671
9672
/* Need to use a body macro here since the EFLAGS behaviour differs between
   the shifts, rotates and rotate w/ carry. Sigh.

   Emits both the register and memory forms of a group 2 Ev,CL instruction
   for all three effective operand sizes (16/32/64-bit), calling the worker
   table given by a_pImplExpr with the shift count fetched from CL.  Expects
   bRm to be in scope.  Consumed by the iemOp_grp2_*_Ev_CL functions below
   and #undef'ed after the last of them. */
#define GRP2_BODY_Ev_CL(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 3, 0, 0); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
9799
9800
/**
 * @opmaps grp2_d0
 * @opcode /0
 * @opflclass rotate_count
 *
 * NOTE(review): these Ev,CL workers belong to opcode 0xd3, yet the @opmaps
 * tags in this run of functions all say grp2_d0 - possibly a copy/paste
 * from the 0xd0 forms; verify against the opcode-map generator.
 */
FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_CL, uint8_t, bRm)
{
    IEMOP_MNEMONIC2EX(rol_Ev_CL, "rol Ev,CL", M_CL, ROL, rol, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
}


/**
 * @opmaps grp2_d0
 * @opcode /1
 * @opflclass rotate_count
 */
FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_CL, uint8_t, bRm)
{
    IEMOP_MNEMONIC2EX(ror_Ev_CL, "ror Ev,CL", M_CL, ROR, ror, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
}


/**
 * @opmaps grp2_d0
 * @opcode /2
 * @opflclass rotate_carry_count
 */
FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_CL, uint8_t, bRm)
{
    IEMOP_MNEMONIC2EX(rcl_Ev_CL, "rcl Ev,CL", M_CL, RCL, rcl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
}


/**
 * @opmaps grp2_d0
 * @opcode /3
 * @opflclass rotate_carry_count
 */
FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_CL, uint8_t, bRm)
{
    IEMOP_MNEMONIC2EX(rcr_Ev_CL, "rcr Ev,CL", M_CL, RCR, rcr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
}


/**
 * @opmaps grp2_d0
 * @opcode /4
 * @opflclass shift_count
 */
FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_CL, uint8_t, bRm)
{
    IEMOP_MNEMONIC2EX(shl_Ev_CL, "shl Ev,CL", M_CL, SHL, shl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
}


/**
 * @opmaps grp2_d0
 * @opcode /5
 * @opflclass shift_count
 */
FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_CL, uint8_t, bRm)
{
    IEMOP_MNEMONIC2EX(shr_Ev_CL, "shr Ev,CL", M_CL, SHR, shr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
}


/**
 * @opmaps grp2_d0
 * @opcode /7
 * @opflclass shift_count
 */
FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_CL, uint8_t, bRm)
{
    IEMOP_MNEMONIC2EX(sar_Ev_CL, "sar Ev,CL", M_CL, SAR, sar, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
}
9883
9884#undef GRP2_BODY_Ev_CL
9885
/**
 * @opcode 0xd3
 *
 * Group 2 Ev,CL: dispatches on the ModR/M reg field to the individual
 * rol/ror/rcl/rcr/shl/shr/sar Ev,CL workers defined above.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_CL, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_CL, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_CL, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_CL, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_CL, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_CL, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_CL, bRm);
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is not assigned in group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
}
9905
9906
/**
 * @opcode 0xd4
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef cf,af,of
 *
 * AAM - ASCII adjust AX after multiply.  Invalid in 64-bit mode; raises
 * \#DE up front when the immediate (the divisor) is zero, otherwise defers
 * to the C implementation which updates xAX and the status flags.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
/** @todo testcase: aam */
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        IEMOP_RAISE_DIVIDE_ERROR_RET(); /* aam 0 would divide by zero. */
    /* The flush mask presumably tells the recompiler xAX is dirtied - confirm. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aam, bImm);
}
9923
9924
/**
 * @opcode 0xd5
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef cf,af,of
 *
 * AAD - ASCII adjust AX before division.  Invalid in 64-bit mode; unlike
 * AAM a zero immediate is legal (it is a multiplier, not a divisor), so
 * the call is deferred unconditionally to the C implementation.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
/** @todo testcase: aad? */
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aad, bImm);
}
9939
9940
/**
 * @opcode 0xd6
 *
 * SALC - sets AL to 0xff when EFLAGS.CF is set, 0x00 otherwise.
 * Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC(salc, "salc");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9959
9960
/**
 * @opcode 0xd7
 *
 * XLAT - table lookup: AL = [iEffSeg : rBX + zero-extended AL], with the
 * address computed at the current effective address size (16/32/64-bit),
 * hence the three separate MC blocks.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10011
10012
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * Raises \#NM / pending FPU exceptions first; when either register is empty
 * the stack-underflow path is taken instead of calling the worker.
 *
 * @param   bRm         Mod R/M byte; the RM field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode); /* result goes to ST0. */
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10042
10043
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags.
 *
 * The worker returns an FSW value which is merged into the FPU status word;
 * no register is written.  Empty registers take the underflow path.
 *
 * @param   bRm         Mod R/M byte; the RM field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode); /* UINT8_MAX = no destination register. */
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10073
10074
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping when done.
 *
 * Same as iemOpHlpFpuNoStore_st0_stN but pops the register stack after the
 * FSW update (and also on the underflow path).
 *
 * @param   bRm         Mod R/M byte; the RM field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10104
10105
/** Opcode 0xd8 11/0.  ST0 = ST0 + STn, via the common ST0/STn worker. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xd8 11/1.  ST0 = ST0 * STn. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xd8 11/2.  Compare ST0 with STn; only FSW is updated. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/3.  Same comparison worker as fcom, but pops afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/4.  ST0 = ST0 - STn. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xd8 11/5.  ST0 = STn - ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xd8 11/6.  ST0 = ST0 / STn. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}


/** Opcode 0xd8 11/7.  ST0 = STn / ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
10168
10169
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * The 32-bit real operand is fetched from memory before the FPU state is
 * touched; an empty ST0 takes the stack-underflow path.
 *
 * @param   bRm         Mod R/M byte (memory form; selects the m32r operand).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10205
10206
/** Opcode 0xd8 !11/0.  ST0 = ST0 + m32r, via the common ST0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}


/** Opcode 0xd8 !11/1.  ST0 = ST0 * m32r. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
10221
10222
/** Opcode 0xd8 !11/2.
 * Compares ST0 with an m32r operand; only FSW is updated (no store),
 * which is why this doesn't use the iemOpHlpFpu_st0_m32r worker. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10254
10255
/** Opcode 0xd8 !11/3.
 * Same as iemOp_fcom_m32r (shares iemAImpl_fcom_r80_by_r32) but pops the
 * register stack after updating FSW - including on the underflow path. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10287
10288
/** Opcode 0xd8 !11/4.  ST0 = ST0 - m32r, via the common ST0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5.  ST0 = m32r - ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6.  ST0 = ST0 / m32r. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7.  ST0 = m32r / ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
10319
10320
/**
 * @opcode 0xd8
 *
 * FPU escape 0: records the FPU opcode word (ModR/M byte + low three bits
 * of the escape opcode) for later FOP reporting, then dispatches on the
 * ModR/M reg field - register forms (mod=3) operate on ST0/STn, memory
 * forms on ST0 and an m32r operand.
 */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10360
10361
/** Opcode 0xd9 /0 mem32real
 * Pushes an m32r operand (converted to R80) onto the FPU stack; if the
 * incoming top-of-stack slot (ST7 after the push) is occupied, the
 * stack-overflow path is taken instead.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10392
10393
/** Opcode 0xd9 !11/2 mem32real
 * Stores ST0 to an m32r destination.  The destination is mapped for write
 * first; if ST0 is empty and FCW.IM is set a negative QNaN is stored,
 * otherwise the mapping is rolled back and only the underflow is recorded. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw); /* commit depends on the resulting FSW. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10431
10432
/** Opcode 0xd9 !11/3
 * Same as iemOp_fst_m32r but pops the register stack after the FSW update
 * (also on the underflow path). */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst); /* masked invalid-operand: write the indefinite QNaN. */
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10470
10471
/** Opcode 0xd9 !11/4
 * Loads the FPU environment (14 or 28 bytes depending on operand size);
 * the heavy lifting is deferred to iemCImpl_fldenv. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw),
                        iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
10490
10491
10492/** Opcode 0xd9 !11/5 */
10493FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
10494{
10495 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
10496 IEM_MC_BEGIN(1, 1, 0, 0);
10497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10499
10500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10501 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10502 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10503
10504 IEM_MC_ARG(uint16_t, u16Fsw, 0);
10505 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10506
10507 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw),
10508 iemCImpl_fldcw, u16Fsw);
10509 IEM_MC_END();
10510}
10511
10512
/** Opcode 0xd9 !11/6
 * FNSTENV m14/28byte - store the FPU environment to memory (no-wait form);
 * deferred to a C implementation (iemCImpl_fnstenv). */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

    /* The 14 vs. 28 byte environment layout is selected by the effective
       operand size.  Note that both FCW and FSW are listed as inputs here
       (the stored environment contains both). */
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
10531
10532
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte - store the FPU control word to memory (no-wait form).
 * Simple enough to be done entirely in micro-code: fetch FCW, store it. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
10549
10550
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP - FPU no-operation.  Still subject to \#NM/\#MF checks and updates
 * the FPU opcode/instruction pointer. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
10566
10567
/** Opcode 0xd9 11/0 stN
 * FLD ST(i) - push a copy of ST(i) onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Source register empty -> push stack-underflow indicator instead. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10592
10593
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i) - exchange ST(0) with ST(i).  The underflow case (either
 * register empty) is punted to a C implementation. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Exchange: ST(i) gets the old ST(0) directly, ST(0) gets the old
       ST(i) via the result (with C1 set per the FSW given here). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10622
10623
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i) - copy ST(0) to ST(i) and pop the FPU stack. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        /* Destination is ST(0) itself: no copy needed, just pop (or flag
           underflow if ST(0) is empty). */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* General case: store ST(0) into ST(iDstReg), then pop. */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10670
10671
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * Raises \#NM/\#MF as appropriate; if ST0 is empty the stack-underflow
 * handling is applied instead of calling the implementation.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10699
10700
/** Opcode 0xd9 0xe0.
 * FCHS - change the sign of ST0 (via the common unary worker). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
10707
10708
/** Opcode 0xd9 0xe1.
 * FABS - absolute value of ST0 (via the common unary worker). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
10715
10716
/** Opcode 0xd9 0xe4.
 * FTST - compare ST0 against +0.0, setting only the FSW condition codes
 * (no register is modified). */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10740
10741
/** Opcode 0xd9 0xe5.
 * FXAM - classify ST0 into the FSW condition code bits.  Unlike most x87
 * operations this also works on an empty ST0 (empty is one of the
 * classifications), hence the plain register reference below instead of a
 * not-empty check. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10762
10763
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEM_MC_BEGIN(1, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires register 7 (relative to the current top) to be free,
       as that is the slot the new value lands in; otherwise overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10789
10790
/** Opcode 0xd9 0xe8.
 * FLD1 - push the constant +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
10797
10798
/** Opcode 0xd9 0xe9.
 * FLDL2T - push the constant log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
10805
10806
/** Opcode 0xd9 0xea.
 * FLDL2E - push the constant log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
10813
/** Opcode 0xd9 0xeb.
 * FLDPI - push the constant pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
10820
10821
/** Opcode 0xd9 0xec.
 * FLDLG2 - push the constant log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
10828
/** Opcode 0xd9 0xed.
 * FLDLN2 - push the constant ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
10835
10836
/** Opcode 0xd9 0xee.
 * FLDZ - push the constant +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
10843
10844
/** Opcode 0xd9 0xf0.
 *
 * F2XM1 - compute 2^ST0 - 1, replacing ST0.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
10858
10859
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Operand order: value1 is ST(i) (the destination), value2 is ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10890
10891
/** Opcode 0xd9 0xf1.
 * FYL2X - ST1 := ST1 * log2(ST0), then pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
10898
10899
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* ST0 empty -> two-result push-underflow handling. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10927
10928
/** Opcode 0xd9 0xf2.
 * FPTAN - partial tangent: replaces ST0 and pushes a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
10935
10936
/** Opcode 0xd9 0xf3.
 * FPATAN - ST1 := arctan(ST1/ST0), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
10943
10944
/** Opcode 0xd9 0xf4.
 * FXTRACT - split ST0 into exponent and significand (replace + push). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
10951
10952
/** Opcode 0xd9 0xf5.
 * FPREM1 - IEEE partial remainder of ST0 by ST1 (worker defined earlier
 * in this file). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
10959
10960
/** Opcode 0xd9 0xf6.
 * FDECSTP - decrement the FPU stack TOP pointer; register contents are
 * left untouched. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10981
10982
/** Opcode 0xd9 0xf7.
 * FINCSTP - increment the FPU stack TOP pointer; register contents are
 * left untouched. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11003
11004
/** Opcode 0xd9 0xf8.
 * FPREM - (truncating) partial remainder of ST0 by ST1 (worker defined
 * earlier in this file). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
11011
11012
/** Opcode 0xd9 0xf9.
 * FYL2XP1 - ST1 := ST1 * log2(ST0 + 1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
11019
11020
/** Opcode 0xd9 0xfa.
 * FSQRT - square root of ST0 (via the common unary worker). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
11027
11028
/** Opcode 0xd9 0xfb.
 * FSINCOS - computes sine and cosine of ST0: replaces ST0 and pushes a
 * second result. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
11035
11036
/** Opcode 0xd9 0xfc.
 * FRNDINT - round ST0 to integer (via the common unary worker). */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
11043
11044
/** Opcode 0xd9 0xfd.
 * FSCALE - scale ST0 by a power of two taken from ST1 (worker defined
 * earlier in this file). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
11051
11052
/** Opcode 0xd9 0xfe.
 * FSIN - sine of ST0 (via the common unary worker). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
11059
11060
/** Opcode 0xd9 0xff.
 * FCOS - cosine of ST0 (via the common unary worker). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
11067
11068
/** Used by iemOp_EscF1.
 * Dispatch table for the register-form 0xd9 instructions with ModR/M bytes
 * 0xe0 thru 0xff; invalid encodings point at iemOp_Invalid. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
11105
11106
/**
 * @opcode      0xd9
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The FPU opcode (FOP) is composed of the ModR/M byte and the low three
       bits of the escape byte (0xd9 & 7 == 1). */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register forms, dispatched on the reg field (for /2 only 0xd0 is
           valid; /4 thru /7 go via the 0xe0..0xff dispatch table). */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms, dispatched on the reg field. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11151
11152
/** Opcode 0xda 11/0.
 * FCMOVB - copy ST(i) to ST0 if CF is set (below). */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both registers must be non-empty; the move itself only happens when
       the EFLAGS condition holds, but FOP/FPUIP are updated regardless. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11177
11178
/** Opcode 0xda 11/1.
 * FCMOVE - copy ST(i) to ST0 if ZF is set (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both registers must be non-empty; the move itself only happens when
       the EFLAGS condition holds, but FOP/FPUIP are updated regardless. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11203
11204
/** Opcode 0xda 11/2.
 * FCMOVBE - copy ST(i) to ST0 if CF or ZF is set (below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both registers must be non-empty; the move itself only happens when
       the EFLAGS condition holds, but FOP/FPUIP are updated regardless. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11229
11230
/** Opcode 0xda 11/3.
 * FCMOVU - copy ST(i) to ST0 if PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both registers must be non-empty; the move itself only happens when
       the EFLAGS condition holds, but FOP/FPUIP are updated regardless. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11255
11256
/**
 * Common worker for FPU instructions working on ST0 and ST1, only affecting
 * flags, and popping twice when done.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* The implementation only produces an FSW value; no register store. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11286
11287
/** Opcode 0xda 0xe9.
 * FUCOMPP - unordered compare of ST0 with ST1, then pop both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
11294
11295
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* The memory operand is fetched before the FPU state is touched, so
       any memory fault is raised first. */
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11331
11332
/** Opcode 0xda !11/0.
 * FIADD m32i - add a 32-bit signed integer memory operand to ST0. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
11339
11340
/** Opcode 0xda !11/1.
 * FIMUL m32i - multiply ST0 by a 32-bit signed integer memory operand. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
11347
11348
/** Opcode 0xda !11/2.
 * FICOM m32i - compare ST0 with a 32-bit signed integer memory operand,
 * updating only the FSW condition codes. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Memory operand is fetched before the FPU state is touched. */
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11380
11381
/** Opcode 0xda !11/3.
 * FICOMP m32i - like FICOM m32i (same assembly worker) but pops the FPU
 * stack afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Memory operand is fetched before the FPU state is touched. */
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11413
11414
/** Opcode 0xda !11/4.
 * FISUB m32i - subtract a 32-bit signed integer memory operand from ST0. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
11421
11422
/** Opcode 0xda !11/5.
 * FISUBR m32i - reversed subtract: ST0 := m32i - ST0. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
11429
11430
/** Opcode 0xda !11/6.
 * FIDIV m32i - divide ST0 by a 32-bit signed integer memory operand. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
11437
11438
/** Opcode 0xda !11/7.
 * FIDIVR m32i - reversed divide: ST0 := m32i / ST0. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
11445
11446
/**
 * @opcode      0xda
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The FPU opcode (FOP) is composed of the ModR/M byte and the low three
       bits of the escape byte (0xda & 7 == 2). */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register forms: FCMOVcc in /0../3, FUCOMPP at 0xe9; the rest is
           invalid. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5:
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms: the integer (m32i) arithmetic/compare group. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11488
11489
11490/** Opcode 0xdb !11/0. */
11491FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
11492{
11493 IEMOP_MNEMONIC(fild_m32i, "fild m32i");
11494
11495 IEM_MC_BEGIN(2, 3, 0, 0);
11496 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11497 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11498 IEM_MC_LOCAL(int32_t, i32Val);
11499 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11500 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);
11501
11502 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11504
11505 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11506 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11507 IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11508
11509 IEM_MC_PREPARE_FPU_USAGE();
11510 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
11511 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
11512 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11513 } IEM_MC_ELSE() {
11514 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11515 } IEM_MC_ENDIF();
11516 IEM_MC_ADVANCE_RIP_AND_FINISH();
11517
11518 IEM_MC_END();
11519}
11520
11521
11522/** Opcode 0xdb !11/1. */
11523FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
11524{
11525 IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
11526 IEM_MC_BEGIN(3, 3, 0, 0);
11527 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11528 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11529
11530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11531 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11532 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11533 IEM_MC_PREPARE_FPU_USAGE();
11534
11535 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11536 IEM_MC_ARG(int32_t *, pi32Dst, 1);
11537 IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11538
11539 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
11540 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
11541 IEM_MC_LOCAL(uint16_t, u16Fsw);
11542 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
11543 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
11544 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
11545 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11546 } IEM_MC_ELSE() {
11547 IEM_MC_IF_FCW_IM() {
11548 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
11549 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
11550 } IEM_MC_ELSE() {
11551 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
11552 } IEM_MC_ENDIF();
11553 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11554 } IEM_MC_ENDIF();
11555 IEM_MC_ADVANCE_RIP_AND_FINISH();
11556
11557 IEM_MC_END();
11558}
11559
11560
11561/** Opcode 0xdb !11/2. */
11562FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
11563{
11564 IEMOP_MNEMONIC(fist_m32i, "fist m32i");
11565 IEM_MC_BEGIN(3, 3, 0, 0);
11566 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11567 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11568
11569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11570 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11571 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11572 IEM_MC_PREPARE_FPU_USAGE();
11573
11574 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11575 IEM_MC_ARG(int32_t *, pi32Dst, 1);
11576 IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11577
11578 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
11579 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
11580 IEM_MC_LOCAL(uint16_t, u16Fsw);
11581 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
11582 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
11583 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
11584 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11585 } IEM_MC_ELSE() {
11586 IEM_MC_IF_FCW_IM() {
11587 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
11588 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
11589 } IEM_MC_ELSE() {
11590 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
11591 } IEM_MC_ENDIF();
11592 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11593 } IEM_MC_ENDIF();
11594 IEM_MC_ADVANCE_RIP_AND_FINISH();
11595
11596 IEM_MC_END();
11597}
11598
11599
11600/** Opcode 0xdb !11/3. */
11601FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
11602{
11603 IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
11604 IEM_MC_BEGIN(3, 2, 0, 0);
11605 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11606 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11607
11608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11609 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11610 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11611 IEM_MC_PREPARE_FPU_USAGE();
11612
11613 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11614 IEM_MC_ARG(int32_t *, pi32Dst, 1);
11615 IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11616
11617 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
11618 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
11619 IEM_MC_LOCAL(uint16_t, u16Fsw);
11620 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
11621 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
11622 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
11623 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11624 } IEM_MC_ELSE() {
11625 IEM_MC_IF_FCW_IM() {
11626 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
11627 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
11628 } IEM_MC_ELSE() {
11629 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
11630 } IEM_MC_ENDIF();
11631 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11632 } IEM_MC_ENDIF();
11633 IEM_MC_ADVANCE_RIP_AND_FINISH();
11634
11635 IEM_MC_END();
11636}
11637
11638
11639/** Opcode 0xdb !11/5. */
11640FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
11641{
11642 IEMOP_MNEMONIC(fld_m80r, "fld m80r");
11643
11644 IEM_MC_BEGIN(2, 3, 0, 0);
11645 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11646 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11647 IEM_MC_LOCAL(RTFLOAT80U, r80Val);
11648 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11649 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);
11650
11651 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11653
11654 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11655 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11656 IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11657
11658 IEM_MC_PREPARE_FPU_USAGE();
11659 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
11660 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
11661 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11662 } IEM_MC_ELSE() {
11663 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11664 } IEM_MC_ENDIF();
11665 IEM_MC_ADVANCE_RIP_AND_FINISH();
11666
11667 IEM_MC_END();
11668}
11669
11670
11671/** Opcode 0xdb !11/7. */
11672FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
11673{
11674 IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
11675 IEM_MC_BEGIN(3, 3, 0, 0);
11676 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11677 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11678
11679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11680 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11681 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11682 IEM_MC_PREPARE_FPU_USAGE();
11683
11684 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11685 IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
11686 IEM_MC_MEM_MAP_R80_WO(pr80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11687
11688 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
11689 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
11690 IEM_MC_LOCAL(uint16_t, u16Fsw);
11691 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
11692 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
11693 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
11694 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11695 } IEM_MC_ELSE() {
11696 IEM_MC_IF_FCW_IM() {
11697 IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
11698 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
11699 } IEM_MC_ELSE() {
11700 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
11701 } IEM_MC_ENDIF();
11702 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11703 } IEM_MC_ENDIF();
11704 IEM_MC_ADVANCE_RIP_AND_FINISH();
11705
11706 IEM_MC_END();
11707}
11708
11709
11710/** Opcode 0xdb 11/0. */
11711FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
11712{
11713 IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
11714 IEM_MC_BEGIN(0, 1, 0, 0);
11715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11716 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
11717
11718 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11719 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11720
11721 IEM_MC_PREPARE_FPU_USAGE();
11722 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
11723 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
11724 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
11725 } IEM_MC_ENDIF();
11726 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
11727 } IEM_MC_ELSE() {
11728 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11729 } IEM_MC_ENDIF();
11730 IEM_MC_ADVANCE_RIP_AND_FINISH();
11731
11732 IEM_MC_END();
11733}
11734
11735
11736/** Opcode 0xdb 11/1. */
11737FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
11738{
11739 IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
11740 IEM_MC_BEGIN(0, 1, 0, 0);
11741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11742 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
11743
11744 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11745 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11746
11747 IEM_MC_PREPARE_FPU_USAGE();
11748 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
11749 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11750 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
11751 } IEM_MC_ENDIF();
11752 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
11753 } IEM_MC_ELSE() {
11754 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11755 } IEM_MC_ENDIF();
11756 IEM_MC_ADVANCE_RIP_AND_FINISH();
11757
11758 IEM_MC_END();
11759}
11760
11761
11762/** Opcode 0xdb 11/2. */
11763FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
11764{
11765 IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
11766 IEM_MC_BEGIN(0, 1, 0, 0);
11767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11768 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
11769
11770 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11771 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11772
11773 IEM_MC_PREPARE_FPU_USAGE();
11774 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
11775 IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
11776 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
11777 } IEM_MC_ENDIF();
11778 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
11779 } IEM_MC_ELSE() {
11780 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11781 } IEM_MC_ENDIF();
11782 IEM_MC_ADVANCE_RIP_AND_FINISH();
11783
11784 IEM_MC_END();
11785}
11786
11787
11788/** Opcode 0xdb 11/3. */
11789FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
11790{
11791 IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
11792 IEM_MC_BEGIN(0, 1, 0, 0);
11793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11794 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
11795
11796 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11797 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11798
11799 IEM_MC_PREPARE_FPU_USAGE();
11800 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
11801 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
11802 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
11803 } IEM_MC_ENDIF();
11804 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
11805 } IEM_MC_ELSE() {
11806 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11807 } IEM_MC_ENDIF();
11808 IEM_MC_ADVANCE_RIP_AND_FINISH();
11809
11810 IEM_MC_END();
11811}
11812
11813
11814/** Opcode 0xdb 0xe0. */
11815FNIEMOP_DEF(iemOp_fneni)
11816{
11817 IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
11818 IEM_MC_BEGIN(0, 0, 0, 0);
11819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11820 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11821 IEM_MC_ADVANCE_RIP_AND_FINISH();
11822 IEM_MC_END();
11823}
11824
11825
11826/** Opcode 0xdb 0xe1. */
11827FNIEMOP_DEF(iemOp_fndisi)
11828{
11829 IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
11830 IEM_MC_BEGIN(0, 0, 0, 0);
11831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11832 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11833 IEM_MC_ADVANCE_RIP_AND_FINISH();
11834 IEM_MC_END();
11835}
11836
11837
11838/** Opcode 0xdb 0xe2. */
11839FNIEMOP_DEF(iemOp_fnclex)
11840{
11841 IEMOP_MNEMONIC(fnclex, "fnclex");
11842 IEM_MC_BEGIN(0, 0, 0, 0);
11843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11844 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11845 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
11846 IEM_MC_CLEAR_FSW_EX();
11847 IEM_MC_ADVANCE_RIP_AND_FINISH();
11848 IEM_MC_END();
11849}
11850
11851
11852/** Opcode 0xdb 0xe3. */
11853FNIEMOP_DEF(iemOp_fninit)
11854{
11855 IEMOP_MNEMONIC(fninit, "fninit");
11856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11857 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
11858 iemCImpl_finit, false /*fCheckXcpts*/);
11859}
11860
11861
11862/** Opcode 0xdb 0xe4. */
11863FNIEMOP_DEF(iemOp_fnsetpm)
11864{
11865 IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
11866 IEM_MC_BEGIN(0, 0, 0, 0);
11867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11868 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11869 IEM_MC_ADVANCE_RIP_AND_FINISH();
11870 IEM_MC_END();
11871}
11872
11873
11874/** Opcode 0xdb 0xe5. */
11875FNIEMOP_DEF(iemOp_frstpm)
11876{
11877 IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
11878#if 0 /* #UDs on newer CPUs */
11879 IEM_MC_BEGIN(0, 0, 0, 0);
11880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11881 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11882 IEM_MC_ADVANCE_RIP_AND_FINISH();
11883 IEM_MC_END();
11884 return VINF_SUCCESS;
11885#else
11886 IEMOP_RAISE_INVALID_OPCODE_RET();
11887#endif
11888}
11889
11890
11891/** Opcode 0xdb 11/5. */
11892FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
11893{
11894 IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
11895 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
11896 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
11897 0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
11898}
11899
11900
11901/** Opcode 0xdb 11/6. */
11902FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
11903{
11904 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
11905 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
11906 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
11907 false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
11908}
11909
11910
11911/**
11912 * @opcode 0xdb
11913 */
11914FNIEMOP_DEF(iemOp_EscF3)
11915{
11916 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11917 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
11918 if (IEM_IS_MODRM_REG_MODE(bRm))
11919 {
11920 switch (IEM_GET_MODRM_REG_8(bRm))
11921 {
11922 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
11923 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
11924 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
11925 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
11926 case 4:
11927 switch (bRm)
11928 {
11929 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
11930 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
11931 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
11932 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
11933 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
11934 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
11935 case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
11936 case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
11937 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11938 }
11939 break;
11940 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
11941 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
11942 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11943 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11944 }
11945 }
11946 else
11947 {
11948 switch (IEM_GET_MODRM_REG_8(bRm))
11949 {
11950 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
11951 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
11952 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
11953 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
11954 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
11955 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
11956 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
11957 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
11958 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11959 }
11960 }
11961}
11962
11963
11964/**
11965 * Common worker for FPU instructions working on STn and ST0, and storing the
11966 * result in STn unless IE, DE or ZE was raised.
11967 *
11968 * @param bRm Mod R/M byte.
11969 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11970 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Operand 1 is ST(i), operand 2 is ST(0); the result goes to ST(i).
       Both registers must be occupied, else it is stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11994
11995
11996/** Opcode 0xdc 11/0. */
11997FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
11998{
11999 IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
12000 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
12001}
12002
12003
12004/** Opcode 0xdc 11/1. */
12005FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
12006{
12007 IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
12008 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
12009}
12010
12011
12012/** Opcode 0xdc 11/4. */
12013FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
12014{
12015 IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
12016 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
12017}
12018
12019
12020/** Opcode 0xdc 11/5. */
12021FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
12022{
12023 IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
12024 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
12025}
12026
12027
12028/** Opcode 0xdc 11/6. */
12029FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
12030{
12031 IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
12032 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
12033}
12034
12035
12036/** Opcode 0xdc 11/7. */
12037FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
12038{
12039 IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
12040 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
12041}
12042
12043
12044/**
12045 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
12046 * memory operand, and storing the result in ST0.
12047 *
12048 * @param bRm Mod R/M byte.
12049 * @param pfnImpl Pointer to the instruction implementation (assembly).
12050 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Factor1,    1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U,   pr64Factor2,    r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Fetch the memory operand before checking the stack register. */
    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12078
12079
12080/** Opcode 0xdc !11/0. */
12081FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
12082{
12083 IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
12084 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
12085}
12086
12087
12088/** Opcode 0xdc !11/1. */
12089FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
12090{
12091 IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
12092 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
12093}
12094
12095
12096/** Opcode 0xdc !11/2. */
12097FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
12098{
12099 IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");
12100
12101 IEM_MC_BEGIN(3, 3, 0, 0);
12102 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12103 IEM_MC_LOCAL(uint16_t, u16Fsw);
12104 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
12105 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12106 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12107 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
12108
12109 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12110 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12111
12112 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12113 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12114 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12115
12116 IEM_MC_PREPARE_FPU_USAGE();
12117 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
12118 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
12119 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12120 } IEM_MC_ELSE() {
12121 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12122 } IEM_MC_ENDIF();
12123 IEM_MC_ADVANCE_RIP_AND_FINISH();
12124
12125 IEM_MC_END();
12126}
12127
12128
12129/** Opcode 0xdc !11/3. */
12130FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
12131{
12132 IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");
12133
12134 IEM_MC_BEGIN(3, 3, 0, 0);
12135 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12136 IEM_MC_LOCAL(uint16_t, u16Fsw);
12137 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
12138 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12139 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12140 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
12141
12142 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12144
12145 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12146 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12147 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12148
12149 IEM_MC_PREPARE_FPU_USAGE();
12150 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
12151 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
12152 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12153 } IEM_MC_ELSE() {
12154 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12155 } IEM_MC_ENDIF();
12156 IEM_MC_ADVANCE_RIP_AND_FINISH();
12157
12158 IEM_MC_END();
12159}
12160
12161
12162/** Opcode 0xdc !11/4. */
12163FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
12164{
12165 IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
12166 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
12167}
12168
12169
12170/** Opcode 0xdc !11/5. */
12171FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
12172{
12173 IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
12174 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
12175}
12176
12177
12178/** Opcode 0xdc !11/6. */
12179FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
12180{
12181 IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
12182 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
12183}
12184
12185
12186/** Opcode 0xdc !11/7. */
12187FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
12188{
12189 IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
12190 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
12191}
12192
12193
12194/**
12195 * @opcode 0xdc
12196 */
12197FNIEMOP_DEF(iemOp_EscF4)
12198{
12199 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12200 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
12201 if (IEM_IS_MODRM_REG_MODE(bRm))
12202 {
12203 switch (IEM_GET_MODRM_REG_8(bRm))
12204 {
12205 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
12206 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
12207 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
12208 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
12209 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
12210 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
12211 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
12212 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
12213 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12214 }
12215 }
12216 else
12217 {
12218 switch (IEM_GET_MODRM_REG_8(bRm))
12219 {
12220 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
12221 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
12222 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
12223 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
12224 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
12225 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
12226 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
12227 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
12228 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12229 }
12230 }
12231}
12232
12233
12234/** Opcode 0xdd !11/0.
12235 * @sa iemOp_fld_m32r */
12236FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
12237{
12238 IEMOP_MNEMONIC(fld_m64r, "fld m64r");
12239
12240 IEM_MC_BEGIN(2, 3, 0, 0);
12241 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12242 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
12243 IEM_MC_LOCAL(RTFLOAT64U, r64Val);
12244 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
12245 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);
12246
12247 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12248 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12249 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12250 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12251
12252 IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12253 IEM_MC_PREPARE_FPU_USAGE();
12254 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
12255 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
12256 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12257 } IEM_MC_ELSE() {
12258 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
12259 } IEM_MC_ENDIF();
12260 IEM_MC_ADVANCE_RIP_AND_FINISH();
12261
12262 IEM_MC_END();
12263}
12264
12265
12266/** Opcode 0xdd !11/0. */
12267FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
12268{
12269 IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
12270 IEM_MC_BEGIN(3, 3, 0, 0);
12271 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12272 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12273
12274 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12275 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12276 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12277 IEM_MC_PREPARE_FPU_USAGE();
12278
12279 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
12280 IEM_MC_ARG(int64_t *, pi64Dst, 1);
12281 IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
12282
12283 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
12284 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
12285 IEM_MC_LOCAL(uint16_t, u16Fsw);
12286 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
12287 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
12288 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
12289 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12290 } IEM_MC_ELSE() {
12291 IEM_MC_IF_FCW_IM() {
12292 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
12293 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
12294 } IEM_MC_ELSE() {
12295 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
12296 } IEM_MC_ENDIF();
12297 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12298 } IEM_MC_ENDIF();
12299 IEM_MC_ADVANCE_RIP_AND_FINISH();
12300
12301 IEM_MC_END();
12302}
12303
12304
12305/** Opcode 0xdd !11/0. */
12306FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
12307{
12308 IEMOP_MNEMONIC(fst_m64r, "fst m64r");
12309 IEM_MC_BEGIN(3, 3, 0, 0);
12310 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12311 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12312
12313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12314 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12315 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12316 IEM_MC_PREPARE_FPU_USAGE();
12317
12318 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
12319 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
12320 IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
12321
12322 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
12323 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
12324 IEM_MC_LOCAL(uint16_t, u16Fsw);
12325 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
12326 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
12327 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
12328 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12329 } IEM_MC_ELSE() {
12330 IEM_MC_IF_FCW_IM() {
12331 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
12332 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
12333 } IEM_MC_ELSE() {
12334 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
12335 } IEM_MC_ENDIF();
12336 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12337 } IEM_MC_ENDIF();
12338 IEM_MC_ADVANCE_RIP_AND_FINISH();
12339
12340 IEM_MC_END();
12341}
12342
12343
12344
12345
/** Opcode 0xdd !11/3 (see iemOp_EscF5 memory dispatch).
 * FSTP m64r - store ST(0) as a 64-bit real to memory, then pop.
 * Same structure as iemOp_fst_m64r but uses the THEN_POP FSW updaters. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination before touching FPU state so memory faults come first. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: masked -> store QNaN indefinite (and still pop); unmasked -> no store. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12383
12384
/** Opcode 0xdd !11/4 (see iemOp_EscF5 memory dispatch).
 * FRSTOR - restore the complete x87 state from a 94/108-byte memory image;
 * deferred to a C implementation (iemCImpl_frstor) as it is too complex
 * for inline microcode. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    /* Image layout depends on operand size (94 vs 108 bytes), hence enmEffOpSize. */
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
12403
12404
/** Opcode 0xdd !11/6 (see iemOp_EscF5 memory dispatch).
 * FNSAVE - save the complete x87 state to a 94/108-byte memory image and
 * then reinitialize the FPU; deferred to iemCImpl_fnsave. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
12423
/** Opcode 0xdd !11/7 (see iemOp_EscF5 memory dispatch).
 * FNSTSW m16 - store the FPU status word to memory; does not check for
 * pending FPU exceptions (the "no-wait" form). */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2, 0, 0);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    /* Only needs the state for reading; FSW itself is unmodified. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
12447
12448
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - tag the register as empty without changing TOP or its value. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
       unmodified. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12468
12469
/** Opcode 0xdd 11/2 (see iemOp_EscF5 register dispatch).
 * FST ST(i) - copy ST(0) into ST(i); on an empty ST(0) the stack
 * underflow path writes an indefinite value / raises per FCW.IM. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Zero FSW in the result: FST does not change the status flags here. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12492
12493
/** Opcode 0xdd 11/4 (see iemOp_EscF5 register dispatch).
 * FUCOM ST(0),ST(i) - unordered compare, no store, no pop. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
12500
12501
/** Opcode 0xdd 11/5 (see iemOp_EscF5 register dispatch).
 * FUCOMP ST(0),ST(i) - unordered compare, no store, pops ST(0). */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
12508
12509
/**
 * @opcode 0xdd
 *
 * Escape opcode 0xdd dispatcher: register-form (mod=11) vs memory-form
 * sub-opcodes selected by the ModR/M reg field. Also latches the 11-bit
 * FPU opcode (FOP) for later FSTENV/FSAVE reporting.
 */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
            case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12548
12549
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0) - add, store in ST(i), pop ST(0). */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
12556
12557
/** Opcode 0xde 11/1 (see iemOp_EscF6 register dispatch).
 * FMULP ST(i),ST(0) - multiply, store in ST(i), pop ST(0). */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
12564
12565
/** Opcode 0xde 0xd9.
 * FCOMPP - compare ST(0) with ST(1), then pop both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
12572
12573
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0) - reverse subtract, store in ST(i), pop ST(0). */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
12580
12581
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0) - subtract, store in ST(i), pop ST(0). */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
12588
12589
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0) - reverse divide, store in ST(i), pop ST(0). */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
12596
12597
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0) - divide, store in ST(i), pop ST(0). */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
12604
12605
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * Fetches the 16-bit signed integer operand first, then calls the assembly
 * implementation only when ST(0) is non-empty; otherwise records a stack
 * underflow on ST(0).
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12641
12642
/** Opcode 0xde !11/0.
 * FIADD m16i - ST(0) += (int16_t)mem. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
12649
12650
/** Opcode 0xde !11/1.
 * FIMUL m16i - ST(0) *= (int16_t)mem. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
12657
12658
/** Opcode 0xde !11/2.
 * FICOM m16i - compare ST(0) with a 16-bit signed integer, no pop.
 * Only updates FSW (via the compare result), no register store. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12690
12691
/** Opcode 0xde !11/3.
 * FICOMP m16i - like FICOM m16i but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12723
12724
/** Opcode 0xde !11/4.
 * FISUB m16i - ST(0) -= (int16_t)mem. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
12731
12732
/** Opcode 0xde !11/5.
 * FISUBR m16i - ST(0) = (int16_t)mem - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
12739
12740
/** Opcode 0xde !11/6.
 * FIDIV m16i - ST(0) /= (int16_t)mem. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
12747
12748
/** Opcode 0xde !11/7.
 * FIDIVR m16i - ST(0) = (int16_t)mem / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
12755
12756
/**
 * @opcode 0xde
 *
 * Escape opcode 0xde dispatcher. Register form: the "...P" (pop) arithmetic
 * family plus FCOMPP at the single encoding 0xd9. Memory form: the 16-bit
 * integer arithmetic/compare family. Latches the FPU opcode (FOP) as well.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12797
12798
/** Opcode 0xdf 11/0.
 * Undocument instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    /* The "pop" half: increment TOP without writing any register. */
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12818
12819
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - copy the FPU status word into AX (no pending-exception check). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12834
12835
12836/** Opcode 0xdf 11/5. */
12837FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
12838{
12839 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
12840 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
12841 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
12842 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
12843}
12844
12845
/** Opcode 0xdf 11/6.
 * FCOMIP ST(0),ST(i) - ordered compare setting EFLAGS (fUCmp=false:
 * QNaN operands raise #IA), then pop; bit 31 of the last argument
 * requests the pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
12854
12855
/** Opcode 0xdf !11/0.
 * FILD m16i - convert a 16-bit signed integer to real and push it.
 * Pushing requires the register that will become the new ST(0) - i.e.
 * current ST(7) - to be empty, else a stack overflow is signalled. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12886
12887
/** Opcode 0xdf !11/1.
 * FISTTP m16i (SSE3) - store ST(0) as int16 with truncation, then pop.
 * On underflow with FCW.IM set, stores the 16-bit integer indefinite
 * (INT16_MIN); otherwise the write is rolled back. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12925
12926
/** Opcode 0xdf !11/2.
 * FIST m16i - store ST(0) as int16 (rounded per FCW.RC), no pop. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Underflow: masked -> store integer indefinite; unmasked -> roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12964
12965
/** Opcode 0xdf !11/3.
 * FISTP m16i - store ST(0) as int16 (rounded per FCW.RC), then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Underflow: masked -> store integer indefinite (and still pop); unmasked -> roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13003
13004
/** Opcode 0xdf !11/4.
 * FBLD m80d - load an 80-bit packed BCD value, convert to real and push. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Push target: current ST(7) must be empty or we signal stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13035
13036
/** Opcode 0xdf !11/5.
 * FILD m64i - convert a 64-bit signed integer to real and push it. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Push target: current ST(7) must be empty or we signal stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13067
13068
/** Opcode 0xdf !11/6.
 * FBSTP m80d - store ST(0) as 80-bit packed BCD, then pop.
 * On underflow with FCW.IM set, stores the BCD indefinite pattern. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_MEM_MAP_D80_WO(pd80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13106
13107
/** Opcode 0xdf !11/7.
 * FISTP m64i - store ST(0) as int64 (rounded per FCW.RC), then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Underflow: masked -> store 64-bit integer indefinite; unmasked -> roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13145
13146
/**
 * @opcode 0xdf
 *
 * Escape opcode 0xdf dispatcher. Register form: FFREEP, reserved aliases,
 * FNSTSW AX (only at encoding 0xe0) and the FUCOMIP/FCOMIP pair.
 * Memory form: the 16-bit integer and BCD load/store family.
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0)
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13187
13188
/**
 * @opcode 0xe0
 * @opfltest zf
 *
 * LOOPNE/LOOPNZ rel8: decrement the counter register (CX/ECX/RCX chosen by
 * the effective address size) and branch if the counter did not reach zero
 * AND ZF is clear. The IF_*CX_IS_NOT_ONE_* checks test the pre-decrement
 * value against 1, so the decrement itself happens on both paths.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13243
13244
13245/**
13246 * @opcode 0xe1
13247 * @opfltest zf
13248 */
13249FNIEMOP_DEF(iemOp_loope_Jb)
13250{
13251 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
13252 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13253 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13254
13255 switch (pVCpu->iem.s.enmEffAddrMode)
13256 {
13257 case IEMMODE_16BIT:
13258 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
13259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13260 IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
13261 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13262 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13263 } IEM_MC_ELSE() {
13264 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13265 IEM_MC_ADVANCE_RIP_AND_FINISH();
13266 } IEM_MC_ENDIF();
13267 IEM_MC_END();
13268 break;
13269
13270 case IEMMODE_32BIT:
13271 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13273 IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
13274 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13275 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13276 } IEM_MC_ELSE() {
13277 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13278 IEM_MC_ADVANCE_RIP_AND_FINISH();
13279 } IEM_MC_ENDIF();
13280 IEM_MC_END();
13281 break;
13282
13283 case IEMMODE_64BIT:
13284 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
13285 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13286 IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
13287 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13288 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13289 } IEM_MC_ELSE() {
13290 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13291 IEM_MC_ADVANCE_RIP_AND_FINISH();
13292 } IEM_MC_ENDIF();
13293 IEM_MC_END();
13294 break;
13295
13296 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13297 }
13298}
13299
13300
13301/**
13302 * @opcode 0xe2
13303 */
13304FNIEMOP_DEF(iemOp_loop_Jb)
13305{
13306 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
13307 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13308 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13309
13310 /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
13311 * using the 32-bit operand size override. How can that be restarted? See
13312 * weird pseudo code in intel manual. */
13313
13314 /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
13315 * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
13316 * the loop causes guest crashes, but when logging it's nice to skip a few million
13317 * lines of useless output. */
13318#if defined(LOG_ENABLED)
13319 if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
13320 switch (pVCpu->iem.s.enmEffAddrMode)
13321 {
13322 case IEMMODE_16BIT:
13323 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
13324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13325 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
13326 IEM_MC_ADVANCE_RIP_AND_FINISH();
13327 IEM_MC_END();
13328 break;
13329
13330 case IEMMODE_32BIT:
13331 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13333 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
13334 IEM_MC_ADVANCE_RIP_AND_FINISH();
13335 IEM_MC_END();
13336 break;
13337
13338 case IEMMODE_64BIT:
13339 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
13340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13341 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
13342 IEM_MC_ADVANCE_RIP_AND_FINISH();
13343 IEM_MC_END();
13344 break;
13345
13346 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13347 }
13348#endif
13349
13350 switch (pVCpu->iem.s.enmEffAddrMode)
13351 {
13352 case IEMMODE_16BIT:
13353 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
13354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13355 IEM_MC_IF_CX_IS_NOT_ONE() {
13356 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13357 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13358 } IEM_MC_ELSE() {
13359 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
13360 IEM_MC_ADVANCE_RIP_AND_FINISH();
13361 } IEM_MC_ENDIF();
13362 IEM_MC_END();
13363 break;
13364
13365 case IEMMODE_32BIT:
13366 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13367 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13368 IEM_MC_IF_ECX_IS_NOT_ONE() {
13369 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13370 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13371 } IEM_MC_ELSE() {
13372 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
13373 IEM_MC_ADVANCE_RIP_AND_FINISH();
13374 } IEM_MC_ENDIF();
13375 IEM_MC_END();
13376 break;
13377
13378 case IEMMODE_64BIT:
13379 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
13380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13381 IEM_MC_IF_RCX_IS_NOT_ONE() {
13382 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13383 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13384 } IEM_MC_ELSE() {
13385 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
13386 IEM_MC_ADVANCE_RIP_AND_FINISH();
13387 } IEM_MC_ENDIF();
13388 IEM_MC_END();
13389 break;
13390
13391 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13392 }
13393}
13394
13395
13396/**
13397 * @opcode 0xe3
13398 */
13399FNIEMOP_DEF(iemOp_jecxz_Jb)
13400{
13401 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
13402 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13403 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13404
13405 switch (pVCpu->iem.s.enmEffAddrMode)
13406 {
13407 case IEMMODE_16BIT:
13408 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
13409 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13410 IEM_MC_IF_CX_IS_NZ() {
13411 IEM_MC_ADVANCE_RIP_AND_FINISH();
13412 } IEM_MC_ELSE() {
13413 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13414 } IEM_MC_ENDIF();
13415 IEM_MC_END();
13416 break;
13417
13418 case IEMMODE_32BIT:
13419 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13421 IEM_MC_IF_ECX_IS_NZ() {
13422 IEM_MC_ADVANCE_RIP_AND_FINISH();
13423 } IEM_MC_ELSE() {
13424 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13425 } IEM_MC_ENDIF();
13426 IEM_MC_END();
13427 break;
13428
13429 case IEMMODE_64BIT:
13430 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
13431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13432 IEM_MC_IF_RCX_IS_NZ() {
13433 IEM_MC_ADVANCE_RIP_AND_FINISH();
13434 } IEM_MC_ELSE() {
13435 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13436 } IEM_MC_ENDIF();
13437 IEM_MC_END();
13438 break;
13439
13440 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13441 }
13442}
13443
13444
13445/**
13446 * @opcode 0xe4
13447 * @opfltest iopl
13448 */
13449FNIEMOP_DEF(iemOp_in_AL_Ib)
13450{
13451 IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
13452 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13453 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13454 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
13455 iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
13456}
13457
13458
13459/**
13460 * @opcode 0xe5
13461 * @opfltest iopl
13462 */
13463FNIEMOP_DEF(iemOp_in_eAX_Ib)
13464{
13465 IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
13466 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13468 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
13469 iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
13470 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
13471}
13472
13473
13474/**
13475 * @opcode 0xe6
13476 * @opfltest iopl
13477 */
13478FNIEMOP_DEF(iemOp_out_Ib_AL)
13479{
13480 IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
13481 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13483 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
13484 iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
13485}
13486
13487
13488/**
13489 * @opcode 0xe7
13490 * @opfltest iopl
13491 */
13492FNIEMOP_DEF(iemOp_out_Ib_eAX)
13493{
13494 IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
13495 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13497 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
13498 iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
13499 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
13500}
13501
13502
13503/**
13504 * @opcode 0xe8
13505 */
13506FNIEMOP_DEF(iemOp_call_Jv)
13507{
13508 IEMOP_MNEMONIC(call_Jv, "call Jv");
13509 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13510 switch (pVCpu->iem.s.enmEffOpSize)
13511 {
13512 case IEMMODE_16BIT:
13513 {
13514 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13515 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
13516 iemCImpl_call_rel_16, (int16_t)u16Imm);
13517 }
13518
13519 case IEMMODE_32BIT:
13520 {
13521 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13522 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
13523 iemCImpl_call_rel_32, (int32_t)u32Imm);
13524 }
13525
13526 case IEMMODE_64BIT:
13527 {
13528 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
13529 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
13530 iemCImpl_call_rel_64, u64Imm);
13531 }
13532
13533 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13534 }
13535}
13536
13537
13538/**
13539 * @opcode 0xe9
13540 */
13541FNIEMOP_DEF(iemOp_jmp_Jv)
13542{
13543 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
13544 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13545 switch (pVCpu->iem.s.enmEffOpSize)
13546 {
13547 case IEMMODE_16BIT:
13548 IEM_MC_BEGIN(0, 0, 0, 0);
13549 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
13550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13551 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
13552 IEM_MC_END();
13553 break;
13554
13555 case IEMMODE_64BIT:
13556 case IEMMODE_32BIT:
13557 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13558 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
13559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13560 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
13561 IEM_MC_END();
13562 break;
13563
13564 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13565 }
13566}
13567
13568
13569/**
13570 * @opcode 0xea
13571 */
13572FNIEMOP_DEF(iemOp_jmp_Ap)
13573{
13574 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
13575 IEMOP_HLP_NO_64BIT();
13576
13577 /* Decode the far pointer address and pass it on to the far call C implementation. */
13578 uint32_t off32Seg;
13579 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
13580 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
13581 else
13582 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
13583 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
13584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13585 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
13586 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
13587 iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
13588 /** @todo make task-switches, ring-switches, ++ return non-zero status */
13589}
13590
13591
13592/**
13593 * @opcode 0xeb
13594 */
13595FNIEMOP_DEF(iemOp_jmp_Jb)
13596{
13597 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
13598 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13599 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13600
13601 IEM_MC_BEGIN(0, 0, 0, 0);
13602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13603 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13604 IEM_MC_END();
13605}
13606
13607
13608/**
13609 * @opcode 0xec
13610 * @opfltest iopl
13611 */
13612FNIEMOP_DEF(iemOp_in_AL_DX)
13613{
13614 IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
13615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13616 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
13617 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
13618 iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
13619}
13620
13621
13622/**
13623 * @opcode 0xed
13624 * @opfltest iopl
13625 */
13626FNIEMOP_DEF(iemOp_in_eAX_DX)
13627{
13628 IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
13629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13630 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
13631 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
13632 iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
13633 pVCpu->iem.s.enmEffAddrMode);
13634}
13635
13636
13637/**
13638 * @opcode 0xee
13639 * @opfltest iopl
13640 */
13641FNIEMOP_DEF(iemOp_out_DX_AL)
13642{
13643 IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
13644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13645 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
13646 iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
13647}
13648
13649
13650/**
13651 * @opcode 0xef
13652 * @opfltest iopl
13653 */
13654FNIEMOP_DEF(iemOp_out_DX_eAX)
13655{
13656 IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
13657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13658 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
13659 iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
13660 pVCpu->iem.s.enmEffAddrMode);
13661}
13662
13663
13664/**
13665 * @opcode 0xf0
13666 */
13667FNIEMOP_DEF(iemOp_lock)
13668{
13669 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
13670 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
13671
13672 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
13673 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
13674}
13675
13676
13677/**
13678 * @opcode 0xf1
13679 */
13680FNIEMOP_DEF(iemOp_int1)
13681{
13682 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
13683 /** @todo Does not generate \#UD on 286, or so they say... Was allegedly a
13684 * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
13685 * LOADALL memo. Needs some testing. */
13686 IEMOP_HLP_MIN_386();
13687 /** @todo testcase! */
13688 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
13689 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
13690 iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
13691}
13692
13693
13694/**
13695 * @opcode 0xf2
13696 */
13697FNIEMOP_DEF(iemOp_repne)
13698{
13699 /* This overrides any previous REPE prefix. */
13700 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
13701 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
13702 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
13703
13704 /* For the 4 entry opcode tables, REPNZ overrides any previous
13705 REPZ and operand size prefixes. */
13706 pVCpu->iem.s.idxPrefix = 3;
13707
13708 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
13709 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
13710}
13711
13712
13713/**
13714 * @opcode 0xf3
13715 */
13716FNIEMOP_DEF(iemOp_repe)
13717{
13718 /* This overrides any previous REPNE prefix. */
13719 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
13720 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
13721 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
13722
13723 /* For the 4 entry opcode tables, REPNZ overrides any previous
13724 REPNZ and operand size prefixes. */
13725 pVCpu->iem.s.idxPrefix = 2;
13726
13727 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
13728 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
13729}
13730
13731
13732/**
13733 * @opcode 0xf4
13734 */
13735FNIEMOP_DEF(iemOp_hlt)
13736{
13737 IEMOP_MNEMONIC(hlt, "hlt");
13738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13739 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_hlt);
13740}
13741
13742
13743/**
13744 * @opcode 0xf5
13745 * @opflmodify cf
13746 */
13747FNIEMOP_DEF(iemOp_cmc)
13748{
13749 IEMOP_MNEMONIC(cmc, "cmc");
13750 IEM_MC_BEGIN(0, 0, 0, 0);
13751 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13752 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
13753 IEM_MC_ADVANCE_RIP_AND_FINISH();
13754 IEM_MC_END();
13755}
13756
13757
13758/**
13759 * Body for of 'inc/dec/not/neg Eb'.
13760 */
13761#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
13762 if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
13763 { \
13764 /* register access */ \
13765 IEM_MC_BEGIN(2, 0, 0, 0); \
13766 IEMOP_HLP_DONE_DECODING(); \
13767 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
13768 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13769 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
13770 IEM_MC_REF_EFLAGS(pEFlags); \
13771 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
13772 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13773 IEM_MC_END(); \
13774 } \
13775 else \
13776 { \
13777 /* memory access. */ \
13778 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
13779 { \
13780 IEM_MC_BEGIN(2, 2, 0, 0); \
13781 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
13782 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13783 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13784 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13785 \
13786 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
13787 IEMOP_HLP_DONE_DECODING(); \
13788 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13789 IEM_MC_FETCH_EFLAGS(EFlags); \
13790 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
13791 \
13792 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13793 IEM_MC_COMMIT_EFLAGS(EFlags); \
13794 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13795 IEM_MC_END(); \
13796 } \
13797 else \
13798 { \
13799 IEM_MC_BEGIN(2, 2, 0, 0); \
13800 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
13801 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13802 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13803 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13804 \
13805 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
13806 IEMOP_HLP_DONE_DECODING(); \
13807 IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13808 IEM_MC_FETCH_EFLAGS(EFlags); \
13809 IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
13810 \
13811 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
13812 IEM_MC_COMMIT_EFLAGS(EFlags); \
13813 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13814 IEM_MC_END(); \
13815 } \
13816 } \
13817 (void)0
13818
13819
13820/**
13821 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
13822 */
13823#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
13824 if (IEM_IS_MODRM_REG_MODE(bRm)) \
13825 { \
13826 /* \
13827 * Register target \
13828 */ \
13829 switch (pVCpu->iem.s.enmEffOpSize) \
13830 { \
13831 case IEMMODE_16BIT: \
13832 IEM_MC_BEGIN(2, 0, 0, 0); \
13833 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13834 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
13835 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13836 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13837 IEM_MC_REF_EFLAGS(pEFlags); \
13838 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
13839 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13840 IEM_MC_END(); \
13841 break; \
13842 \
13843 case IEMMODE_32BIT: \
13844 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0); \
13845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13846 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
13847 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13848 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13849 IEM_MC_REF_EFLAGS(pEFlags); \
13850 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
13851 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
13852 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13853 IEM_MC_END(); \
13854 break; \
13855 \
13856 case IEMMODE_64BIT: \
13857 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0); \
13858 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13859 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
13860 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13861 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13862 IEM_MC_REF_EFLAGS(pEFlags); \
13863 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
13864 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13865 IEM_MC_END(); \
13866 break; \
13867 \
13868 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
13869 } \
13870 } \
13871 else \
13872 { \
13873 /* \
13874 * Memory target. \
13875 */ \
13876 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
13877 { \
13878 switch (pVCpu->iem.s.enmEffOpSize) \
13879 { \
13880 case IEMMODE_16BIT: \
13881 IEM_MC_BEGIN(2, 3, 0, 0); \
13882 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
13883 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13884 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13885 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13886 \
13887 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13888 IEMOP_HLP_DONE_DECODING(); \
13889 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13890 IEM_MC_FETCH_EFLAGS(EFlags); \
13891 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
13892 \
13893 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13894 IEM_MC_COMMIT_EFLAGS(EFlags); \
13895 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13896 IEM_MC_END(); \
13897 break; \
13898 \
13899 case IEMMODE_32BIT: \
13900 IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
13901 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
13902 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13903 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13904 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13905 \
13906 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13907 IEMOP_HLP_DONE_DECODING(); \
13908 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13909 IEM_MC_FETCH_EFLAGS(EFlags); \
13910 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
13911 \
13912 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13913 IEM_MC_COMMIT_EFLAGS(EFlags); \
13914 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13915 IEM_MC_END(); \
13916 break; \
13917 \
13918 case IEMMODE_64BIT: \
13919 IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
13920 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
13921 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13922 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13923 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13924 \
13925 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13926 IEMOP_HLP_DONE_DECODING(); \
13927 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13928 IEM_MC_FETCH_EFLAGS(EFlags); \
13929 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
13930 \
13931 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13932 IEM_MC_COMMIT_EFLAGS(EFlags); \
13933 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13934 IEM_MC_END(); \
13935 break; \
13936 \
13937 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
13938 } \
13939 } \
13940 else \
13941 { \
13942 (void)0
13943
/**
 * Locked-memory tail for IEMOP_BODY_UNARY_Ev.
 *
 * Must directly follow an IEMOP_BODY_UNARY_Ev invocation: it supplies the
 * LOCK-prefixed memory path (atomic map/commit, @a a_fnLockedU16/32/64
 * workers) and closes the braces that macro leaves open.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
14009
14010
14011/**
14012 * @opmaps grp3_f6
14013 * @opcode /0
14014 * @opflclass logical
14015 * @todo also /1
14016 */
14017FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
14018{
14019 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
14020 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
14021
14022 if (IEM_IS_MODRM_REG_MODE(bRm))
14023 {
14024 /* register access */
14025 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
14026 IEM_MC_BEGIN(3, 0, 0, 0);
14027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14028 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
14029 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
14030 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14031 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
14032 IEM_MC_REF_EFLAGS(pEFlags);
14033 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
14034 IEM_MC_ADVANCE_RIP_AND_FINISH();
14035 IEM_MC_END();
14036 }
14037 else
14038 {
14039 /* memory access. */
14040 IEM_MC_BEGIN(3, 3, 0, 0);
14041 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14042 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
14043
14044 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
14045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14046
14047 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
14048 IEM_MC_ARG(uint8_t const *, pu8Dst, 0);
14049 IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14050
14051 IEM_MC_ARG_CONST(uint8_t, u8Src, u8Imm, 1);
14052 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
14053 IEM_MC_FETCH_EFLAGS(EFlags);
14054 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
14055
14056 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
14057 IEM_MC_COMMIT_EFLAGS(EFlags);
14058 IEM_MC_ADVANCE_RIP_AND_FINISH();
14059 IEM_MC_END();
14060 }
14061}
14062
14063
/* Body for opcode 0xf6 variations /4, /5, /6 and /7 (mul/imul/div/idiv Eb).
   The worker operates on AX and returns a status in 'rc'; a non-zero rc
   raises \#DE (divide error) instead of advancing RIP. */
#define IEMOP_GRP3_MUL_DIV_EB(bRm, a_pfnU8Expr) \
    PFNIEMAIMPLMULDIVU8 const pfnU8 = (a_pfnU8Expr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register access */ \
        IEM_MC_BEGIN(3, 1, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint16_t *,      pu16AX,  0); \
        IEM_MC_ARG(uint8_t,         u8Value, 1); \
        IEM_MC_ARG(uint32_t *,      pEFlags, 2); \
        IEM_MC_LOCAL(int32_t,       rc); \
        \
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags); \
        IEM_MC_IF_LOCAL_IS_Z(rc) { \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        } IEM_MC_ELSE() { \
            IEM_MC_RAISE_DIVIDE_ERROR(); \
        } IEM_MC_ENDIF(); \
        \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory access. */ \
        IEM_MC_BEGIN(3, 2, 0, 0); \
        IEM_MC_ARG(uint16_t *,      pu16AX,  0); \
        IEM_MC_ARG(uint8_t,         u8Value, 1); \
        IEM_MC_ARG(uint32_t *,      pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
        IEM_MC_LOCAL(int32_t,       rc); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags); \
        IEM_MC_IF_LOCAL_IS_Z(rc) { \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        } IEM_MC_ELSE() { \
            IEM_MC_RAISE_DIVIDE_ERROR(); \
        } IEM_MC_ENDIF(); \
        \
        IEM_MC_END(); \
    } (void)0
14113
14114
14115/* Body for opcode 0xf7 variant /4, /5, /6 and /7. */
14116#define IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, a_pImplExpr) \
14117 PCIEMOPMULDIVSIZES const pImpl = (a_pImplExpr); \
14118 if (IEM_IS_MODRM_REG_MODE(bRm)) \
14119 { \
14120 /* register access */ \
14121 switch (pVCpu->iem.s.enmEffOpSize) \
14122 { \
14123 case IEMMODE_16BIT: \
14124 IEM_MC_BEGIN(4, 1, 0, 0); \
14125 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14126 IEM_MC_ARG(uint16_t *, pu16AX, 0); \
14127 IEM_MC_ARG(uint16_t *, pu16DX, 1); \
14128 IEM_MC_ARG(uint16_t, u16Value, 2); \
14129 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
14130 IEM_MC_LOCAL(int32_t, rc); \
14131 \
14132 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
14133 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
14134 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX); \
14135 IEM_MC_REF_EFLAGS(pEFlags); \
14136 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags); \
14137 IEM_MC_IF_LOCAL_IS_Z(rc) { \
14138 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14139 } IEM_MC_ELSE() { \
14140 IEM_MC_RAISE_DIVIDE_ERROR(); \
14141 } IEM_MC_ENDIF(); \
14142 \
14143 IEM_MC_END(); \
14144 break; \
14145 \
14146 case IEMMODE_32BIT: \
14147 IEM_MC_BEGIN(4, 1, IEM_MC_F_MIN_386, 0); \
14148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14149 IEM_MC_ARG(uint32_t *, pu32AX, 0); \
14150 IEM_MC_ARG(uint32_t *, pu32DX, 1); \
14151 IEM_MC_ARG(uint32_t, u32Value, 2); \
14152 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
14153 IEM_MC_LOCAL(int32_t, rc); \
14154 \
14155 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
14156 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX); \
14157 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX); \
14158 IEM_MC_REF_EFLAGS(pEFlags); \
14159 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags); \
14160 IEM_MC_IF_LOCAL_IS_Z(rc) { \
14161 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
14162 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX); \
14163 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14164 } IEM_MC_ELSE() { \
14165 IEM_MC_RAISE_DIVIDE_ERROR(); \
14166 } IEM_MC_ENDIF(); \
14167 \
14168 IEM_MC_END(); \
14169 break; \
14170 \
14171 case IEMMODE_64BIT: \
14172 IEM_MC_BEGIN(4, 1, IEM_MC_F_64BIT, 0); \
14173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14174 IEM_MC_ARG(uint64_t *, pu64AX, 0); \
14175 IEM_MC_ARG(uint64_t *, pu64DX, 1); \
14176 IEM_MC_ARG(uint64_t, u64Value, 2); \
14177 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
14178 IEM_MC_LOCAL(int32_t, rc); \
14179 \
14180 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
14181 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX); \
14182 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX); \
14183 IEM_MC_REF_EFLAGS(pEFlags); \
14184 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags); \
14185 IEM_MC_IF_LOCAL_IS_Z(rc) { \
14186 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14187 } IEM_MC_ELSE() { \
14188 IEM_MC_RAISE_DIVIDE_ERROR(); \
14189 } IEM_MC_ENDIF(); \
14190 \
14191 IEM_MC_END(); \
14192 break; \
14193 \
14194 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
14195 } \
14196 } \
14197 else \
14198 { \
14199 /* memory access. */ \
14200 switch (pVCpu->iem.s.enmEffOpSize) \
14201 { \
14202 case IEMMODE_16BIT: \
14203 IEM_MC_BEGIN(4, 2, 0, 0); \
14204 IEM_MC_ARG(uint16_t *, pu16AX, 0); \
14205 IEM_MC_ARG(uint16_t *, pu16DX, 1); \
14206 IEM_MC_ARG(uint16_t, u16Value, 2); \
14207 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
14208 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14209 IEM_MC_LOCAL(int32_t, rc); \
14210 \
14211 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14213 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14214 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
14215 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX); \
14216 IEM_MC_REF_EFLAGS(pEFlags); \
14217 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags); \
14218 IEM_MC_IF_LOCAL_IS_Z(rc) { \
14219 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14220 } IEM_MC_ELSE() { \
14221 IEM_MC_RAISE_DIVIDE_ERROR(); \
14222 } IEM_MC_ENDIF(); \
14223 \
14224 IEM_MC_END(); \
14225 break; \
14226 \
14227 case IEMMODE_32BIT: \
14228 IEM_MC_BEGIN(4, 2, IEM_MC_F_MIN_386, 0); \
14229 IEM_MC_ARG(uint32_t *, pu32AX, 0); \
14230 IEM_MC_ARG(uint32_t *, pu32DX, 1); \
14231 IEM_MC_ARG(uint32_t, u32Value, 2); \
14232 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
14233 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14234 IEM_MC_LOCAL(int32_t, rc); \
14235 \
14236 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14238 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14239 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX); \
14240 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX); \
14241 IEM_MC_REF_EFLAGS(pEFlags); \
14242 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags); \
14243 IEM_MC_IF_LOCAL_IS_Z(rc) { \
14244 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
14245 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX); \
14246 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14247 } IEM_MC_ELSE() { \
14248 IEM_MC_RAISE_DIVIDE_ERROR(); \
14249 } IEM_MC_ENDIF(); \
14250 \
14251 IEM_MC_END(); \
14252 break; \
14253 \
14254 case IEMMODE_64BIT: \
14255 IEM_MC_BEGIN(4, 2, IEM_MC_F_64BIT, 0); \
14256 IEM_MC_ARG(uint64_t *, pu64AX, 0); \
14257 IEM_MC_ARG(uint64_t *, pu64DX, 1); \
14258 IEM_MC_ARG(uint64_t, u64Value, 2); \
14259 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
14260 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
14261 IEM_MC_LOCAL(int32_t, rc); \
14262 \
14263 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
14264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
14265 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
14266 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX); \
14267 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX); \
14268 IEM_MC_REF_EFLAGS(pEFlags); \
14269 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags); \
14270 IEM_MC_IF_LOCAL_IS_Z(rc) { \
14271 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
14272 } IEM_MC_ELSE() { \
14273 IEM_MC_RAISE_DIVIDE_ERROR(); \
14274 } IEM_MC_ENDIF(); \
14275 \
14276 IEM_MC_END(); \
14277 break; \
14278 \
14279 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
14280 } \
14281 } (void)0
14282
14283
14284/**
14285 * @opmaps grp3_f6
14286 * @opcode /2
14287 * @opflclass unchanged
14288 */
14289FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
14290{
14291/** @todo does not modify EFLAGS. */
14292 IEMOP_MNEMONIC(not_Eb, "not Eb");
14293 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
14294}
14295
14296
14297/**
14298 * @opmaps grp3_f6
14299 * @opcode /3
14300 * @opflclass arithmetic
14301 */
14302FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
14303{
14304 IEMOP_MNEMONIC(net_Eb, "neg Eb");
14305 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
14306}
14307
14308
14309/**
14310 * @opcode 0xf6
14311 */
14312FNIEMOP_DEF(iemOp_Grp3_Eb)
14313{
14314 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14315 switch (IEM_GET_MODRM_REG_8(bRm))
14316 {
14317 case 0:
14318 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
14319 case 1:
14320 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
14321 case 2:
14322 return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
14323 case 3:
14324 return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
14325 case 4:
14326 {
14327 /**
14328 * @opdone
14329 * @opmaps grp3_f6
14330 * @opcode /4
14331 * @opflclass multiply
14332 */
14333 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
14334 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14335 IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
14336 break;
14337 }
14338 case 5:
14339 {
14340 /**
14341 * @opdone
14342 * @opmaps grp3_f6
14343 * @opcode /5
14344 * @opflclass multiply
14345 */
14346 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
14347 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14348 IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
14349 break;
14350 }
14351 case 6:
14352 {
14353 /**
14354 * @opdone
14355 * @opmaps grp3_f6
14356 * @opcode /6
14357 * @opflclass division
14358 */
14359 IEMOP_MNEMONIC(div_Eb, "div Eb");
14360 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14361 IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
14362 break;
14363 }
14364 case 7:
14365 {
14366 /**
14367 * @opdone
14368 * @opmaps grp3_f6
14369 * @opcode /7
14370 * @opflclass division
14371 */
14372 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
14373 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14374 IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
14375 break;
14376 }
14377 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14378 }
14379}
14380
14381
14382/**
14383 * @opmaps grp3_f7
14384 * @opcode /0
14385 * @opflclass logical
14386 */
14387FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
14388{
14389 IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
14390 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
14391
14392 if (IEM_IS_MODRM_REG_MODE(bRm))
14393 {
14394 /* register access */
14395 switch (pVCpu->iem.s.enmEffOpSize)
14396 {
14397 case IEMMODE_16BIT:
14398 IEM_MC_BEGIN(3, 0, 0, 0);
14399 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
14400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14401 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
14402 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
14403 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14404 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
14405 IEM_MC_REF_EFLAGS(pEFlags);
14406 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
14407 IEM_MC_ADVANCE_RIP_AND_FINISH();
14408 IEM_MC_END();
14409 break;
14410
14411 case IEMMODE_32BIT:
14412 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
14413 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
14414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14415 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
14416 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
14417 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14418 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
14419 IEM_MC_REF_EFLAGS(pEFlags);
14420 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
14421 /* No clearing the high dword here - test doesn't write back the result. */
14422 IEM_MC_ADVANCE_RIP_AND_FINISH();
14423 IEM_MC_END();
14424 break;
14425
14426 case IEMMODE_64BIT:
14427 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
14428 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
14429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14430 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
14431 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
14432 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14433 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
14434 IEM_MC_REF_EFLAGS(pEFlags);
14435 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
14436 IEM_MC_ADVANCE_RIP_AND_FINISH();
14437 IEM_MC_END();
14438 break;
14439
14440 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14441 }
14442 }
14443 else
14444 {
14445 /* memory access. */
14446 switch (pVCpu->iem.s.enmEffOpSize)
14447 {
14448 case IEMMODE_16BIT:
14449 IEM_MC_BEGIN(3, 3, 0, 0);
14450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14451 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
14452
14453 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
14454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14455
14456 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
14457 IEM_MC_ARG(uint16_t const *, pu16Dst, 0);
14458 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14459
14460 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
14461 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
14462 IEM_MC_FETCH_EFLAGS(EFlags);
14463 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
14464
14465 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
14466 IEM_MC_COMMIT_EFLAGS(EFlags);
14467 IEM_MC_ADVANCE_RIP_AND_FINISH();
14468 IEM_MC_END();
14469 break;
14470
14471 case IEMMODE_32BIT:
14472 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
14473 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14474 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
14475
14476 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
14477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14478
14479 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
14480 IEM_MC_ARG(uint32_t const *, pu32Dst, 0);
14481 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14482
14483 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
14484 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
14485 IEM_MC_FETCH_EFLAGS(EFlags);
14486 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
14487
14488 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
14489 IEM_MC_COMMIT_EFLAGS(EFlags);
14490 IEM_MC_ADVANCE_RIP_AND_FINISH();
14491 IEM_MC_END();
14492 break;
14493
14494 case IEMMODE_64BIT:
14495 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
14496 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14497 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
14498
14499 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
14500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14501
14502 IEM_MC_ARG(uint64_t const *, pu64Dst, 0);
14503 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
14504 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14505
14506 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1);
14507 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
14508 IEM_MC_FETCH_EFLAGS(EFlags);
14509 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
14510
14511 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
14512 IEM_MC_COMMIT_EFLAGS(EFlags);
14513 IEM_MC_ADVANCE_RIP_AND_FINISH();
14514 IEM_MC_END();
14515 break;
14516
14517 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14518 }
14519 }
14520}
14521
14522
14523/**
14524 * @opmaps grp3_f7
14525 * @opcode /2
14526 * @opflclass unchanged
14527 */
14528FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
14529{
14530/** @todo does not modify EFLAGS */
14531 IEMOP_MNEMONIC(not_Ev, "not Ev");
14532 IEMOP_BODY_UNARY_Ev( iemAImpl_not_u16, iemAImpl_not_u32, iemAImpl_not_u64);
14533 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
14534}
14535
14536
14537/**
14538 * @opmaps grp3_f7
14539 * @opcode /3
14540 * @opflclass arithmetic
14541 */
14542FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
14543{
14544 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
14545 IEMOP_BODY_UNARY_Ev( iemAImpl_neg_u16, iemAImpl_neg_u32, iemAImpl_neg_u64);
14546 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
14547}
14548
14549
14550/**
14551 * @opmaps grp3_f7
14552 * @opcode /4
14553 * @opflclass multiply
14554 */
14555FNIEMOP_DEF_1(iemOp_grp3_mul_Ev, uint8_t, bRm)
14556{
14557 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
14558 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14559 IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
14560}
14561
14562
14563/**
14564 * @opmaps grp3_f7
14565 * @opcode /5
14566 * @opflclass multiply
14567 */
14568FNIEMOP_DEF_1(iemOp_grp3_imul_Ev, uint8_t, bRm)
14569{
14570 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
14571 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14572 IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
14573}
14574
14575
14576/**
14577 * @opmaps grp3_f7
14578 * @opcode /6
14579 * @opflclass division
14580 */
14581FNIEMOP_DEF_1(iemOp_grp3_div_Ev, uint8_t, bRm)
14582{
14583 IEMOP_MNEMONIC(div_Ev, "div Ev");
14584 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14585 IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
14586}
14587
14588
14589/**
14590 * @opmaps grp3_f7
14591 * @opcode /7
14592 * @opflclass division
14593 */
14594FNIEMOP_DEF_1(iemOp_grp3_idiv_Ev, uint8_t, bRm)
14595{
14596 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
14597 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14598 IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
14599}
14600
14601
14602/**
14603 * @opcode 0xf7
14604 */
14605FNIEMOP_DEF(iemOp_Grp3_Ev)
14606{
14607 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14608 switch (IEM_GET_MODRM_REG_8(bRm))
14609 {
14610 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
14611 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
14612 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
14613 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
14614 case 4: return FNIEMOP_CALL_1(iemOp_grp3_mul_Ev, bRm);
14615 case 5: return FNIEMOP_CALL_1(iemOp_grp3_imul_Ev, bRm);
14616 case 6: return FNIEMOP_CALL_1(iemOp_grp3_div_Ev, bRm);
14617 case 7: return FNIEMOP_CALL_1(iemOp_grp3_idiv_Ev, bRm);
14618 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14619 }
14620}
14621
14622
14623/**
14624 * @opcode 0xf8
14625 * @opflmodify cf
14626 * @opflclear cf
14627 */
14628FNIEMOP_DEF(iemOp_clc)
14629{
14630 IEMOP_MNEMONIC(clc, "clc");
14631 IEM_MC_BEGIN(0, 0, 0, 0);
14632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14633 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
14634 IEM_MC_ADVANCE_RIP_AND_FINISH();
14635 IEM_MC_END();
14636}
14637
14638
14639/**
14640 * @opcode 0xf9
14641 * @opflmodify cf
14642 * @opflset cf
14643 */
14644FNIEMOP_DEF(iemOp_stc)
14645{
14646 IEMOP_MNEMONIC(stc, "stc");
14647 IEM_MC_BEGIN(0, 0, 0, 0);
14648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14649 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
14650 IEM_MC_ADVANCE_RIP_AND_FINISH();
14651 IEM_MC_END();
14652}
14653
14654
14655/**
14656 * @opcode 0xfa
14657 * @opfltest iopl,vm
14658 * @opflmodify if,vif
14659 */
14660FNIEMOP_DEF(iemOp_cli)
14661{
14662 IEMOP_MNEMONIC(cli, "cli");
14663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14664 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, 0, iemCImpl_cli);
14665}
14666
14667
14668/**
14669 * @opcode 0xfb
14670 * @opfltest iopl,vm
14671 * @opflmodify if,vif
14672 */
14673FNIEMOP_DEF(iemOp_sti)
14674{
14675 IEMOP_MNEMONIC(sti, "sti");
14676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14677 IEM_MC_DEFER_TO_CIMPL_0_RET( IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER
14678 | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_INHIBIT_SHADOW, 0, iemCImpl_sti);
14679}
14680
14681
14682/**
14683 * @opcode 0xfc
14684 * @opflmodify df
14685 * @opflclear df
14686 */
14687FNIEMOP_DEF(iemOp_cld)
14688{
14689 IEMOP_MNEMONIC(cld, "cld");
14690 IEM_MC_BEGIN(0, 0, 0, 0);
14691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14692 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
14693 IEM_MC_ADVANCE_RIP_AND_FINISH();
14694 IEM_MC_END();
14695}
14696
14697
14698/**
14699 * @opcode 0xfd
14700 * @opflmodify df
14701 * @opflset df
14702 */
14703FNIEMOP_DEF(iemOp_std)
14704{
14705 IEMOP_MNEMONIC(std, "std");
14706 IEM_MC_BEGIN(0, 0, 0, 0);
14707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14708 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
14709 IEM_MC_ADVANCE_RIP_AND_FINISH();
14710 IEM_MC_END();
14711}
14712
14713
14714/**
14715 * @opmaps grp4
14716 * @opcode /0
14717 * @opflclass incdec
14718 */
14719FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
14720{
14721 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
14722 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
14723}
14724
14725
14726/**
14727 * @opmaps grp4
14728 * @opcode /1
14729 * @opflclass incdec
14730 */
14731FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
14732{
14733 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
14734 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
14735}
14736
14737
14738/**
14739 * @opcode 0xfe
14740 */
14741FNIEMOP_DEF(iemOp_Grp4)
14742{
14743 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14744 switch (IEM_GET_MODRM_REG_8(bRm))
14745 {
14746 case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
14747 case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
14748 default:
14749 /** @todo is the eff-addr decoded? */
14750 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
14751 IEMOP_RAISE_INVALID_OPCODE_RET();
14752 }
14753}
14754
14755/**
14756 * @opmaps grp5
14757 * @opcode /0
14758 * @opflclass incdec
14759 */
14760FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
14761{
14762 IEMOP_MNEMONIC(inc_Ev, "inc Ev");
14763 IEMOP_BODY_UNARY_Ev( iemAImpl_inc_u16, iemAImpl_inc_u32, iemAImpl_inc_u64);
14764 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
14765}
14766
14767
14768/**
14769 * @opmaps grp5
14770 * @opcode /1
14771 * @opflclass incdec
14772 */
14773FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
14774{
14775 IEMOP_MNEMONIC(dec_Ev, "dec Ev");
14776 IEMOP_BODY_UNARY_Ev( iemAImpl_dec_u16, iemAImpl_dec_u32, iemAImpl_dec_u64);
14777 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
14778}
14779
14780
14781/**
14782 * Opcode 0xff /2.
14783 * @param bRm The RM byte.
14784 */
14785FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
14786{
14787 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
14788 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
14789
14790 if (IEM_IS_MODRM_REG_MODE(bRm))
14791 {
14792 /* The new RIP is taken from a register. */
14793 switch (pVCpu->iem.s.enmEffOpSize)
14794 {
14795 case IEMMODE_16BIT:
14796 IEM_MC_BEGIN(1, 0, 0, 0);
14797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14798 IEM_MC_ARG(uint16_t, u16Target, 0);
14799 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14800 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
14801 IEM_MC_END();
14802 break;
14803
14804 case IEMMODE_32BIT:
14805 IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
14806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14807 IEM_MC_ARG(uint32_t, u32Target, 0);
14808 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14809 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
14810 IEM_MC_END();
14811 break;
14812
14813 case IEMMODE_64BIT:
14814 IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
14815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14816 IEM_MC_ARG(uint64_t, u64Target, 0);
14817 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14818 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
14819 IEM_MC_END();
14820 break;
14821
14822 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14823 }
14824 }
14825 else
14826 {
14827 /* The new RIP is taken from a register. */
14828 switch (pVCpu->iem.s.enmEffOpSize)
14829 {
14830 case IEMMODE_16BIT:
14831 IEM_MC_BEGIN(1, 1, 0, 0);
14832 IEM_MC_ARG(uint16_t, u16Target, 0);
14833 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14834 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14836 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14837 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
14838 IEM_MC_END();
14839 break;
14840
14841 case IEMMODE_32BIT:
14842 IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
14843 IEM_MC_ARG(uint32_t, u32Target, 0);
14844 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14845 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14847 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14848 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
14849 IEM_MC_END();
14850 break;
14851
14852 case IEMMODE_64BIT:
14853 IEM_MC_BEGIN(1, 1, IEM_MC_F_64BIT, 0);
14854 IEM_MC_ARG(uint64_t, u64Target, 0);
14855 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14856 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14857 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14858 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14859 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
14860 IEM_MC_END();
14861 break;
14862
14863 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14864 }
14865 }
14866}
14867
/**
 * Body for the far indirect branches (grp5 \/3 callf and \/5 jmpf): loads a
 * far pointer (offset followed by 16-bit selector) from memory and defers to
 * @a a_fnCImpl.  Register operands are invalid and raise \#UD.  In 64-bit
 * mode the default operand size is 32-bit; only Intel honours REX.W here.
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl, a_fCImplExtra) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1, 0, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint16_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint32_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint64_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                | IEM_CIMPL_F_MODE /* no gates */, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
14936
14937
14938/**
14939 * Opcode 0xff /3.
14940 * @param bRm The RM byte.
14941 */
14942FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
14943{
14944 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
14945 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf, IEM_CIMPL_F_BRANCH_STACK);
14946}
14947
14948
14949/**
14950 * Opcode 0xff /4.
14951 * @param bRm The RM byte.
14952 */
14953FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
14954{
14955 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
14956 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
14957
14958 if (IEM_IS_MODRM_REG_MODE(bRm))
14959 {
14960 /* The new RIP is taken from a register. */
14961 switch (pVCpu->iem.s.enmEffOpSize)
14962 {
14963 case IEMMODE_16BIT:
14964 IEM_MC_BEGIN(0, 1, 0, 0);
14965 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14966 IEM_MC_LOCAL(uint16_t, u16Target);
14967 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14968 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
14969 IEM_MC_END();
14970 break;
14971
14972 case IEMMODE_32BIT:
14973 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
14974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14975 IEM_MC_LOCAL(uint32_t, u32Target);
14976 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14977 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
14978 IEM_MC_END();
14979 break;
14980
14981 case IEMMODE_64BIT:
14982 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
14983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14984 IEM_MC_LOCAL(uint64_t, u64Target);
14985 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14986 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
14987 IEM_MC_END();
14988 break;
14989
14990 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14991 }
14992 }
14993 else
14994 {
14995 /* The new RIP is taken from a memory location. */
14996 switch (pVCpu->iem.s.enmEffOpSize)
14997 {
14998 case IEMMODE_16BIT:
14999 IEM_MC_BEGIN(0, 2, 0, 0);
15000 IEM_MC_LOCAL(uint16_t, u16Target);
15001 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15002 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15004 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15005 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
15006 IEM_MC_END();
15007 break;
15008
15009 case IEMMODE_32BIT:
15010 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
15011 IEM_MC_LOCAL(uint32_t, u32Target);
15012 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15013 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15015 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15016 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
15017 IEM_MC_END();
15018 break;
15019
15020 case IEMMODE_64BIT:
15021 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
15022 IEM_MC_LOCAL(uint64_t, u64Target);
15023 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15024 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15026 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15027 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
15028 IEM_MC_END();
15029 break;
15030
15031 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15032 }
15033 }
15034}
15035
15036
15037/**
15038 * Opcode 0xff /5.
15039 * @param bRm The RM byte.
15040 */
15041FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
15042{
15043 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
15044 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp, 0);
15045}
15046
15047
15048/**
15049 * Opcode 0xff /6.
15050 * @param bRm The RM byte.
15051 */
15052FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
15053{
15054 IEMOP_MNEMONIC(push_Ev, "push Ev");
15055
15056 /* Registers are handled by a common worker. */
15057 if (IEM_IS_MODRM_REG_MODE(bRm))
15058 return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
15059
15060 /* Memory we do here. */
15061 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
15062 switch (pVCpu->iem.s.enmEffOpSize)
15063 {
15064 case IEMMODE_16BIT:
15065 IEM_MC_BEGIN(0, 2, 0, 0);
15066 IEM_MC_LOCAL(uint16_t, u16Src);
15067 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15068 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15069 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15070 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15071 IEM_MC_PUSH_U16(u16Src);
15072 IEM_MC_ADVANCE_RIP_AND_FINISH();
15073 IEM_MC_END();
15074 break;
15075
15076 case IEMMODE_32BIT:
15077 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
15078 IEM_MC_LOCAL(uint32_t, u32Src);
15079 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15080 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15082 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15083 IEM_MC_PUSH_U32(u32Src);
15084 IEM_MC_ADVANCE_RIP_AND_FINISH();
15085 IEM_MC_END();
15086 break;
15087
15088 case IEMMODE_64BIT:
15089 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
15090 IEM_MC_LOCAL(uint64_t, u64Src);
15091 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15092 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15094 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15095 IEM_MC_PUSH_U64(u64Src);
15096 IEM_MC_ADVANCE_RIP_AND_FINISH();
15097 IEM_MC_END();
15098 break;
15099
15100 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15101 }
15102}
15103
15104
15105/**
15106 * @opcode 0xff
15107 */
15108FNIEMOP_DEF(iemOp_Grp5)
15109{
15110 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15111 switch (IEM_GET_MODRM_REG_8(bRm))
15112 {
15113 case 0:
15114 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
15115 case 1:
15116 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
15117 case 2:
15118 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
15119 case 3:
15120 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
15121 case 4:
15122 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
15123 case 5:
15124 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
15125 case 6:
15126 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
15127 case 7:
15128 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
15129 IEMOP_RAISE_INVALID_OPCODE_RET();
15130 }
15131 AssertFailedReturn(VERR_IEM_IPE_3);
15132}
15133
15134
15135
/**
 * One-byte opcode dispatch table.
 *
 * Indexed directly by the opcode byte: entry N is the decoder worker
 * (PFNIEMOP) for opcode N.  The table is forward declared as extern near the
 * top of this file so earlier code (e.g. prefix handlers) can dispatch
 * through it.
 *
 * Reading guide (derived from the handler names; confirm details in the
 * individual workers):
 *  - 0x0f  : iemOp_2byteEscape - escape into the two-byte opcode map.
 *  - 0x26/0x2e/0x36/0x3e, 0x64/0x65 : segment override prefixes.
 *  - 0x66/0x67 : operand-size / address-size override prefixes.
 *  - 0x40..0x4f : inc/dec handlers - presumably these also deal with REX
 *    prefixes in 64-bit mode; verify in the workers.
 *  - 0x61/0x62, 0x8f, 0xc4/0xc5 : double-named entries (..__mvex, ..__evex,
 *    ..__xop, ..__vex2/3) - opcodes reused as MVEX/EVEX/XOP/VEX prefix
 *    escapes; the worker is expected to disambiguate by CPU mode.
 *  - 0xd8..0xdf : x87 FPU escape bytes (iemOp_EscF0..iemOp_EscF7).
 *  - 0xf0/0xf2/0xf3 : lock/repne/repe prefixes.
 *  - 0x80/0x82, 0xc0/0xc1, 0xd0..0xd3, 0xf6/0xf7, 0xfe/0xff : ModR/M
 *    reg-field group dispatchers (Grp1..Grp5, Grp11).
 *
 * NOTE(review): entries 0xb1..0xbf drop the "mov_" name prefix
 * (iemOp_CL_Ib, iemOp_eAX_Iv, ...) while 0xb0/0xb4 keep it - a naming
 * inconsistency in the handlers themselves, not fixable from this table.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex3, iemOp_lds_Gv_Mp__vex2, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_in_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
15203
15204
15205/** @} */
15206
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette