VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 103644

Last change on this file since 103644 was 103644, checked in by vboxsync, 9 months ago

VMM/IEM: Use native emitter for the reg/reg variant in IEMOP_BODY_BINARY_rm_rv_RO & IEMOP_BODY_BINARY_rm_rv_RW. bugref:10376

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 565.0 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 103644 2024-03-02 01:55:02Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
/**
 * Body for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination (Eb,Gb encoding, read-write).
 *
 * @param a_fnNormalU8          Assembly worker for the plain (non-locked) case.
 * @param a_fnLockedU8          Assembly worker for the LOCK prefixed case.
 * @param a_EmitterBasename     Basename used to form the native emitter name
 *                              (expands to iemNativeEmit_<basename>_r_r_efl).
 * @param a_fRegRegNativeArchs  Host arch mask for native reg,reg emission.
 * @param a_fMemRegNativeArchs  Host arch mask for native mem,reg emission
 *                              (currently unused by this body).
 */
#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8, a_fnLockedU8, a_EmitterBasename, a_fRegRegNativeArchs, a_fMemRegNativeArchs) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
            IEM_MC_LOCAL(uint32_t, uEFlags); \
            IEM_MC_FETCH_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Dst); \
            IEM_MC_COMMIT_EFLAGS(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG(uint8_t, u8Src, 1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* LOCK prefixed (or not disregarded): atomic mapping and locked worker. */ \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG(uint8_t, u8Src, 1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bMapInfoDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bMapInfoDst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bMapInfoDst); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
145
/**
 * Body for instructions like TEST & CMP, ++ with a byte memory/registers as
 * operands (Eb,Gb encoding, read-only destination).
 *
 * @param a_fnNormalU8          Assembly worker (must not write *pu8Dst).
 * @param a_EmitterBasename     Basename used to form the native emitter name
 *                              (expands to iemNativeEmit_<basename>_r_r_efl).
 * @param a_fRegRegNativeArchs  Host arch mask for native reg,reg emission.
 * @param a_fMemRegNativeArchs  Host arch mask for native mem,reg emission
 *                              (currently unused by this body).
 */
#define IEMOP_BODY_BINARY_rm_r8_RO(a_fnNormalU8, a_EmitterBasename, a_fRegRegNativeArchs, a_fMemRegNativeArchs) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
            IEM_MC_LOCAL(uint32_t, uEFlags); \
            IEM_MC_FETCH_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            /* Read-only body (TEST/CMP): no write-back of u8Dst, only the flags. */ \
            IEM_MC_COMMIT_EFLAGS(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_ARG(uint8_t, u8Src, 1); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* LOCK on a read-only destination is invalid. */ \
            /** @todo we should probably decode the address first. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
217
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination (Gb,Eb encoding, read-write destination register).
 *
 * @param a_fnNormalU8      Assembly worker for the fallback path.
 * @param a_EmitterBasename Basename used to form the native emitter name
 *                          (expands to iemNativeEmit_<basename>_r_r_efl).
 * @param a_fNativeArchs    Host arch mask for native emission (both the
 *                          reg,reg and reg,mem variants use it here).
 */
#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8, a_EmitterBasename, a_fNativeArchs) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
            IEM_MC_LOCAL(uint32_t, uEFlags); \
            IEM_MC_FETCH_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst); \
            IEM_MC_COMMIT_EFLAGS(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory.  Source operand only, so no LOCK and no \
         * write-back mapping needed. \
         */ \
        IEM_MC_BEGIN(3, 1, 0, 0); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_NATIVE_IF(a_fNativeArchs) { \
            IEM_MC_LOCAL(uint8_t, u8Dst); \
            IEM_MC_FETCH_GREG_U8(u8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
            IEM_MC_LOCAL(uint32_t, uEFlags); \
            IEM_MC_FETCH_EFLAGS(uEFlags); \
            IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u8Dst, u8Src, uEFlags, 8); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst); \
            IEM_MC_COMMIT_EFLAGS(uEFlags); \
        } IEM_MC_NATIVE_ELSE() { \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        } IEM_MC_NATIVE_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
284
285
/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination (Ev,Gv encoding, read-write).
 *
 * Note! This macro is deliberately left "open": it ends inside the final
 *       else-branch for the LOCK prefixed memory case and MUST be followed
 *       by IEMOP_BODY_BINARY_rm_rv_LOCKED, which closes the braces.  (Split
 *       into two macros to work around an IEMAllInstPython.py parsing issue.)
 *
 * @param a_fnNormalU16/32/64   Assembly workers for the non-locked cases.
 * @param a_EmitterBasename     Basename used to form the native emitter name
 *                              (expands to iemNativeEmit_<basename>_r_r_efl).
 * @param a_fRegRegNativeArchs  Host arch mask for native reg,reg emission.
 * @param a_fMemRegNativeArchs  Host arch mask for native mem,reg emission
 *                              (currently unused by this body).
 */
#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_EmitterBasename, a_fRegRegNativeArchs, a_fMemRegNativeArchs) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t, u16Src, 1); \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
                    IEM_MC_LOCAL(uint16_t, u16Dst); \
                    IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u16Dst, u16Src, uEFlags, 16); \
                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Dst); \
                    IEM_MC_COMMIT_EFLAGS(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t, u32Src, 1); \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
                    IEM_MC_LOCAL(uint32_t, u32Dst); \
                    IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u32Dst, u32Src, uEFlags, 32); \
                    /* The 32-bit store implicitly zero-extends (clears the high dword). */ \
                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst); \
                    IEM_MC_COMMIT_EFLAGS(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t, u64Src, 1); \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
                    IEM_MC_LOCAL(uint64_t, u64Dst); \
                    IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u64Dst, u64Src, uEFlags, 64); \
                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst); \
                    IEM_MC_COMMIT_EFLAGS(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py.
   Continues (and closes) the open else-branch left by
   IEMOP_BODY_BINARY_rm_rv_RW: the LOCK prefixed memory cases using atomic
   mappings and the locked assembly workers. */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
531
/**
 * Body for read-only word/dword/qword instructions like TEST and CMP with
 * memory/register as the destination (Ev,Gv encoding, destination untouched).
 *
 * @param a_fnNormalU16/32/64   Assembly workers (must not write *puDst).
 * @param a_EmitterBasename     Basename used to form the native emitter name
 *                              (expands to iemNativeEmit_<basename>_r_r_efl).
 * @param a_fRegRegNativeArchs  Host arch mask for native reg,reg emission.
 * @param a_fMemRegNativeArchs  Host arch mask for native mem,reg emission
 *                              (currently unused by this body).
 */
#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_EmitterBasename, a_fRegRegNativeArchs, a_fMemRegNativeArchs) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t, u16Src, 1); \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
                    IEM_MC_LOCAL(uint16_t, u16Dst); \
                    IEM_MC_FETCH_GREG_U16(u16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u16Dst, u16Src, uEFlags, 16); \
                    /* Read-only body (TEST/CMP): no write-back, only the flags. */ \
                    IEM_MC_COMMIT_EFLAGS(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t, u32Src, 1); \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
                    IEM_MC_LOCAL(uint32_t, u32Dst); \
                    IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u32Dst, u32Src, uEFlags, 32); \
                    /* Read-only body (TEST/CMP): no write-back.  A U32 store \
                       would also (wrongly) zero the high dword of the RM \
                       register, which these instructions must preserve - note \
                       that the fallback path below has no \
                       IEM_MC_CLEAR_HIGH_GREG_U64 either. */ \
                    IEM_MC_COMMIT_EFLAGS(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t, u64Src, 1); \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_NATIVE_IF(a_fRegRegNativeArchs) { \
                    IEM_MC_LOCAL(uint64_t, u64Dst); \
                    IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    /** @todo IEM_MC_LOCAL_EFLAGS(uEFlags); */ \
                    IEM_MC_LOCAL(uint32_t, uEFlags); \
                    IEM_MC_FETCH_EFLAGS(uEFlags); \
                    IEM_MC_NATIVE_EMIT_4(RT_CONCAT3(iemNativeEmit_,a_EmitterBasename,_r_r_efl), u64Dst, u64Src, uEFlags, 64); \
                    /* Read-only body (TEST/CMP): no write-back, only the flags. */ \
                    IEM_MC_COMMIT_EFLAGS(uEFlags); \
                } IEM_MC_NATIVE_ELSE() { \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                    IEM_MC_REF_EFLAGS(pEFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                } IEM_MC_NATIVE_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* LOCK on a read-only destination is invalid. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
708
709
/**
 * Body for instructions like ADD, AND, OR, ++ working on AL with
 * a byte immediate (AL,Ib encoding).
 *
 * Note! No trailing semicolon / (void)0 - the caller supplies it after the
 *       closing IEM_MC_END().
 *
 * @param a_fnNormalU8  Assembly worker taking (pu8Dst, u8Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    \
    IEM_MC_BEGIN(3, 0, 0, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
    IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    \
    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
    \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
729
/**
 * Body for instructions like ADD, AND, OR, ++ working on
 * AX/EAX/RAX with a word/dword immediate (rAX,Iz encoding; the qword
 * immediate is the sign-extended dword, per the x86 Iz rules).
 *
 * Note! The cases have no break after IEM_MC_END(): each block ends with
 *       IEM_MC_ADVANCE_RIP_AND_FINISH(), which returns, so fallthrough is
 *       unreachable.
 *
 * @param a_fnNormalU16/32/64   Assembly workers per operand size.
 * @param a_fModifiesDstReg     Non-zero for ops that write rAX (ADD, AND, ...),
 *                              zero for read-only ones (CMP, TEST); governs
 *                              the 32-bit high-dword clearing.
 */
#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            \
            IEM_MC_BEGIN(3, 0, 0, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
            IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            \
            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
            IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            \
            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
            \
            if (a_fModifiesDstReg) \
                IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
            IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            \
            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
796
797
798
799/* Instruction specification format - work in progress: */
800
801/**
802 * @opcode 0x00
803 * @opmnemonic add
804 * @op1 rm:Eb
805 * @op2 reg:Gb
806 * @opmaps one
807 * @openc ModR/M
808 * @opflclass arithmetic
809 * @ophints harmless ignores_op_sizes
810 * @opstats add_Eb_Gb
811 * @opgroup og_gen_arith_bin
812 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
813 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
814 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
815 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
816 */
817FNIEMOP_DEF(iemOp_add_Eb_Gb)
818{
819 IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
820 IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_add_u8, iemAImpl_add_u8_locked, add, 0, 0);
821}
822
823
824/**
825 * @opcode 0x01
826 * @opgroup og_gen_arith_bin
827 * @opflclass arithmetic
828 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
829 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
830 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
831 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
832 */
833FNIEMOP_DEF(iemOp_add_Ev_Gv)
834{
835 IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
836 IEMOP_BODY_BINARY_rm_rv_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, add, 0, 0);
837 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
838}
839
840
841/**
842 * @opcode 0x02
843 * @opgroup og_gen_arith_bin
844 * @opflclass arithmetic
845 * @opcopytests iemOp_add_Eb_Gb
846 */
847FNIEMOP_DEF(iemOp_add_Gb_Eb)
848{
849 IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
850 IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8, add, 0);
851}
852
853
854/**
855 * @opcode 0x03
856 * @opgroup og_gen_arith_bin
857 * @opflclass arithmetic
858 * @opcopytests iemOp_add_Ev_Gv
859 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    /* 0x03: ADD Gv,Ev — register destination form; ModR/M fetched here, body decodes the rest. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1, 0, add, 0);
}
866
867
868/**
869 * @opcode 0x04
870 * @opgroup og_gen_arith_bin
871 * @opflclass arithmetic
872 * @opcopytests iemOp_add_Eb_Gb
873 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* 0x04: ADD AL,Ib — fixed-register byte add with immediate. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
}
879
880
881/**
882 * @opcode 0x05
883 * @opgroup og_gen_arith_bin
884 * @opflclass arithmetic
 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
886 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
887 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
888 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
889 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* 0x05: ADD rAX,Iz — operand-size dependent immediate add into AX/EAX/RAX. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
895
896
897/**
898 * @opcode 0x06
899 * @opgroup og_stack_sreg
900 */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* 0x06: PUSH ES — invalid in 64-bit mode; shares the common segment-push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
907
908
909/**
910 * @opcode 0x07
911 * @opgroup og_stack_sreg
912 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* 0x07: POP ES — invalid in 64-bit mode; deferred to C impl which may change
       addressing mode, hence IEM_CIMPL_F_MODE.  The mask lists the guest registers
       the C impl can modify: xSP plus all four ES shadow members. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES),
                                iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
926
927
928/**
929 * @opcode 0x08
930 * @opgroup og_gen_arith_bin
931 * @opflclass logical
932 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
933 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
934 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
935 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
936 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    /* 0x08: OR Eb,Gb — byte OR, memory/register destination; AF declared undefined for verification. */
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_or_u8, iemAImpl_or_u8_locked, or, 0, 0);
}
943
944
945/*
946 * @opcode 0x09
947 * @opgroup og_gen_arith_bin
948 * @opflclass logical
949 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
950 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
951 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
952 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
953 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
954 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
955 * @note AF is documented as undefined, but both modern AMD and Intel CPUs clears it.
956 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    /* 0x09: OR Ev,Gv — 16/32/64-bit OR with memory destination; LOCKed variants for the memory form. */
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64,        or, 0, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
964
965
966/**
967 * @opcode 0x0a
968 * @opgroup og_gen_arith_bin
969 * @opflclass logical
970 * @opcopytests iemOp_or_Eb_Gb
971 */
972FNIEMOP_DEF(iemOp_or_Gb_Eb)
973{
974 IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
975 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
976 IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8, or, 0);
977}
978
979
980/**
981 * @opcode 0x0b
982 * @opgroup og_gen_arith_bin
983 * @opflclass logical
984 * @opcopytests iemOp_or_Ev_Gv
985 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    /* 0x0b: OR Gv,Ev — register destination form. */
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1, 0, or, 0);
}
993
994
995/**
996 * @opcode 0x0c
997 * @opgroup og_gen_arith_bin
998 * @opflclass logical
999 * @opcopytests iemOp_or_Eb_Gb
1000 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    /* 0x0c: OR AL,Ib — fixed-register byte OR with immediate. */
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
}
1007
1008
1009/**
1010 * @opcode 0x0d
1011 * @opgroup og_gen_arith_bin
1012 * @opflclass logical
1013 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
1014 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
1015 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1016 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1017 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1018 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
1019 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
1020 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* 0x0d: OR rAX,Iz — operand-size dependent immediate OR into AX/EAX/RAX. */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
1027
1028
1029/**
1030 * @opcode 0x0e
1031 * @opgroup og_stack_sreg
1032 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* 0x0e: PUSH CS — invalid in 64-bit mode; shares the common segment-push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
1039
1040
1041/**
1042 * @opcode 0x0f
1043 * @opmnemonic EscTwo0f
1044 * @openc two0f
1045 * @opdisenum OP_2B_ESC
1046 * @ophints harmless
1047 * @opgroup og_escapes
1048 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    /* On 286 and later, 0x0f escapes into the two-byte opcode map (4 entries
       per opcode, selected by the current prefix index). */
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_CS),
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
1086
1087/**
1088 * @opcode 0x10
1089 * @opgroup og_gen_arith_bin
1090 * @opflclass arithmetic_carry
1091 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1092 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1093 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1094 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1095 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1096 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* 0x10: ADC Eb,Gb — byte add-with-carry, memory/register destination; LOCKable. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_adc_u8, iemAImpl_adc_u8_locked, adc, 0, 0);
}
1102
1103
1104/**
1105 * @opcode 0x11
1106 * @opgroup og_gen_arith_bin
1107 * @opflclass arithmetic_carry
1108 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1109 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1110 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1111 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1112 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1113 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* 0x11: ADC Ev,Gv — 16/32/64-bit add-with-carry; LOCKed variants for the memory form. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64,        adc, 0, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
1120
1121
1122/**
1123 * @opcode 0x12
1124 * @opgroup og_gen_arith_bin
1125 * @opflclass arithmetic_carry
1126 * @opcopytests iemOp_adc_Eb_Gb
1127 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* 0x12: ADC Gb,Eb — register destination form; no LOCK variant. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8, adc, 0);
}
1133
1134
1135/**
1136 * @opcode 0x13
1137 * @opgroup og_gen_arith_bin
1138 * @opflclass arithmetic_carry
1139 * @opcopytests iemOp_adc_Ev_Gv
1140 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* 0x13: ADC Gv,Ev — register destination form. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1, 0, adc, 0);
}
1147
1148
1149/**
1150 * @opcode 0x14
1151 * @opgroup og_gen_arith_bin
1152 * @opflclass arithmetic_carry
1153 * @opcopytests iemOp_adc_Eb_Gb
1154 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* 0x14: ADC AL,Ib — fixed-register byte add-with-carry with immediate. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
1160
1161
1162/**
1163 * @opcode 0x15
1164 * @opgroup og_gen_arith_bin
1165 * @opflclass arithmetic_carry
1166 * @opcopytests iemOp_adc_Ev_Gv
1167 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* 0x15: ADC rAX,Iz — operand-size dependent immediate add-with-carry into AX/EAX/RAX. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1173
1174
1175/**
1176 * @opcode 0x16
1177 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* 0x16: PUSH SS — invalid in 64-bit mode; shares the common segment-push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1184
1185
1186/**
1187 * @opcode 0x17
1188 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* 0x17: POP SS — invalid in 64-bit mode.  IEM_CIMPL_F_INHIBIT_SHADOW reflects the
       one-instruction interrupt shadow following a load of SS.  The mask lists the
       guest registers the C impl can modify: xSP plus all four SS shadow members. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_INHIBIT_SHADOW,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_SS),
                                iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1202
1203
1204/**
1205 * @opcode 0x18
1206 * @opgroup og_gen_arith_bin
1207 * @opflclass arithmetic_carry
1208 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* 0x18: SBB Eb,Gb — byte subtract-with-borrow, memory/register destination; LOCKable. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_sbb_u8, iemAImpl_sbb_u8_locked, sbb, 0, 0);
}
1214
1215
1216/**
1217 * @opcode 0x19
1218 * @opgroup og_gen_arith_bin
1219 * @opflclass arithmetic_carry
1220 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* 0x19: SBB Ev,Gv — 16/32/64-bit subtract-with-borrow; LOCKed variants for the memory form. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64,        sbb, 0, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
1227
1228
1229/**
1230 * @opcode 0x1a
1231 * @opgroup og_gen_arith_bin
1232 * @opflclass arithmetic_carry
1233 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* 0x1a: SBB Gb,Eb — register destination form; no LOCK variant. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8, sbb, 0);
}
1239
1240
1241/**
1242 * @opcode 0x1b
1243 * @opgroup og_gen_arith_bin
1244 * @opflclass arithmetic_carry
1245 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* 0x1b: SBB Gv,Ev — register destination form. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1, 0, sbb, 0);
}
1252
1253
1254/**
1255 * @opcode 0x1c
1256 * @opgroup og_gen_arith_bin
1257 * @opflclass arithmetic_carry
1258 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* 0x1c: SBB AL,Ib — fixed-register byte subtract-with-borrow with immediate. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
1264
1265
1266/**
1267 * @opcode 0x1d
1268 * @opgroup og_gen_arith_bin
1269 * @opflclass arithmetic_carry
1270 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* 0x1d: SBB rAX,Iz — operand-size dependent immediate subtract-with-borrow. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1276
1277
1278/**
1279 * @opcode 0x1e
1280 * @opgroup og_stack_sreg
1281 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* 0x1e: PUSH DS — invalid in 64-bit mode; shares the common segment-push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1288
1289
1290/**
1291 * @opcode 0x1f
1292 * @opgroup og_stack_sreg
1293 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* 0x1f: POP DS — invalid in 64-bit mode; deferred to C impl (may change mode).
       The mask lists the guest registers the C impl can modify: xSP plus all four
       DS shadow members. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS),
                                iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1307
1308
1309/**
1310 * @opcode 0x20
1311 * @opgroup og_gen_arith_bin
1312 * @opflclass logical
1313 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* 0x20: AND Eb,Gb — byte AND, memory/register destination; LOCKable; AF undefined. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_and_u8, iemAImpl_and_u8_locked, and, 0, 0);
}
1320
1321
1322/**
1323 * @opcode 0x21
1324 * @opgroup og_gen_arith_bin
1325 * @opflclass logical
1326 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* 0x21: AND Ev,Gv — 16/32/64-bit AND; LOCKed variants for the memory form. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64,        and, 0, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1334
1335
1336/**
1337 * @opcode 0x22
1338 * @opgroup og_gen_arith_bin
1339 * @opflclass logical
1340 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* 0x22: AND Gb,Eb — register destination form; no LOCK variant. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8, and, 0);
}
1347
1348
1349/**
1350 * @opcode 0x23
1351 * @opgroup og_gen_arith_bin
1352 * @opflclass logical
1353 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* 0x23: AND Gv,Ev — register destination form. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1, 0, and, 0);
}
1361
1362
1363/**
1364 * @opcode 0x24
1365 * @opgroup og_gen_arith_bin
1366 * @opflclass logical
1367 */
1368FNIEMOP_DEF(iemOp_and_Al_Ib)
1369{
1370 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1371 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1372 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1373}
1374
1375
1376/**
1377 * @opcode 0x25
1378 * @opgroup og_gen_arith_bin
1379 * @opflclass logical
1380 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* 0x25: AND rAX,Iz — operand-size dependent immediate AND into AX/EAX/RAX. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1387
1388
1389/**
1390 * @opcode 0x26
1391 * @opmnemonic SEG
1392 * @op1 ES
1393 * @opgroup og_prefix
1394 * @openc prefix
1395 * @opdisenum OP_SEG
1396 * @ophints harmless
1397 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* 0x26: ES segment-override prefix — record the prefix, set the effective
       segment, then decode and dispatch the following opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg    = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1407
1408
1409/**
1410 * @opcode 0x27
1411 * @opfltest af,cf
1412 * @opflmodify cf,pf,af,zf,sf,of
1413 * @opflundef of
1414 */
FNIEMOP_DEF(iemOp_daa)
{
    /* 0x27: DAA — decimal adjust AL after addition; invalid in 64-bit mode.
       Deferred to C impl; touches xAX and the status flags, OF is undefined. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_daa);
}
1423
1424
1425/**
1426 * @opcode 0x28
1427 * @opgroup og_gen_arith_bin
1428 * @opflclass arithmetic
1429 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* 0x28: SUB Eb,Gb — byte subtract, memory/register destination; LOCKable. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_sub_u8, iemAImpl_sub_u8_locked, sub, 0, 0);
}
1435
1436
1437/**
1438 * @opcode 0x29
1439 * @opgroup og_gen_arith_bin
1440 * @opflclass arithmetic
1441 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* 0x29: SUB Ev,Gv — 16/32/64-bit subtract; LOCKed variants for the memory form. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64,        sub, 0, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1448
1449
1450/**
1451 * @opcode 0x2a
1452 * @opgroup og_gen_arith_bin
1453 * @opflclass arithmetic
1454 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* 0x2a: SUB Gb,Eb — register destination form; no LOCK variant. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8, sub, 0);
}
1460
1461
1462/**
1463 * @opcode 0x2b
1464 * @opgroup og_gen_arith_bin
1465 * @opflclass arithmetic
1466 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* 0x2b: SUB Gv,Ev — register destination form. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1, 0, sub, 0);
}
1473
1474
1475/**
1476 * @opcode 0x2c
1477 * @opgroup og_gen_arith_bin
1478 * @opflclass arithmetic
1479 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* 0x2c: SUB AL,Ib — fixed-register byte subtract with immediate. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1485
1486
1487/**
1488 * @opcode 0x2d
1489 * @opgroup og_gen_arith_bin
1490 * @opflclass arithmetic
1491 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* 0x2d: SUB rAX,Iz — operand-size dependent immediate subtract from AX/EAX/RAX. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1497
1498
1499/**
1500 * @opcode 0x2e
1501 * @opmnemonic SEG
1502 * @op1 CS
1503 * @opgroup og_prefix
1504 * @openc prefix
1505 * @opdisenum OP_SEG
1506 * @ophints harmless
1507 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* 0x2e: CS segment-override prefix — record the prefix, set the effective
       segment, then decode and dispatch the following opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1517
1518
1519/**
1520 * @opcode 0x2f
1521 * @opfltest af,cf
1522 * @opflmodify cf,pf,af,zf,sf,of
1523 * @opflundef of
1524 */
FNIEMOP_DEF(iemOp_das)
{
    /* 0x2f: DAS — decimal adjust AL after subtraction; invalid in 64-bit mode.
       Deferred to C impl; touches xAX and the status flags, OF is undefined. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_das);
}
1533
1534
1535/**
1536 * @opcode 0x30
1537 * @opgroup og_gen_arith_bin
1538 * @opflclass logical
1539 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* 0x30: XOR Eb,Gb — byte XOR; native emitters available on AMD64 and ARM64. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_xor_u8, iemAImpl_xor_u8_locked, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
}
1546
1547
1548/**
1549 * @opcode 0x31
1550 * @opgroup og_gen_arith_bin
1551 * @opflclass logical
1552 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* 0x31: XOR Ev,Gv — 16/32/64-bit XOR; native emitters on AMD64/ARM64; LOCKed memory variants. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64,        xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64, 0);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1560
1561
1562/**
1563 * @opcode 0x32
1564 * @opgroup og_gen_arith_bin
1565 * @opflclass logical
1566 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* 0x32: XOR Gb,Eb — register destination form; native emitters on AMD64/ARM64. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /** @todo xor al,al optimization */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1574
1575
1576/**
1577 * @opcode 0x33
1578 * @opgroup og_gen_arith_bin
1579 * @opflclass logical
1580 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* 0x33: XOR Gv,Ev — register destination form, with a fast path for the
       self-XOR zeroing idiom. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Deal with special case of 'xor rN, rN' which sets rN to zero and has a known EFLAGS outcome.
     */
    /* The comparison folds mod == 3 (register form) and reg == rm into one check;
       the REX.R/REX.B comparison makes sure both operands name the same extended register. */
    if (   (bRm >> X86_MODRM_REG_SHIFT) == ((bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT))
        && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB)
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                /* Store zero, then clear all status flags and set PF+ZF (result is zero). */
                IEM_MC_BEGIN(1, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
                IEM_MC_LOCAL(uint32_t, fEFlags);
                IEM_MC_FETCH_EFLAGS(fEFlags);
                IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS);
                IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF);
                IEM_MC_COMMIT_EFLAGS(fEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
                IEM_MC_LOCAL(uint32_t, fEFlags);
                IEM_MC_FETCH_EFLAGS(fEFlags);
                IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS);
                IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF);
                IEM_MC_COMMIT_EFLAGS(fEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
                IEM_MC_LOCAL(uint32_t, fEFlags);
                IEM_MC_FETCH_EFLAGS(fEFlags);
                IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS);
                IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF);
                IEM_MC_COMMIT_EFLAGS(fEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /* Generic path (not a self-XOR of the same register). */
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1, 0, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1641
1642
1643/**
1644 * @opcode 0x34
1645 * @opgroup og_gen_arith_bin
1646 * @opflclass logical
1647 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* 0x34: XOR AL,Ib — fixed-register byte XOR with immediate. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1654
1655
1656/**
1657 * @opcode 0x35
1658 * @opgroup og_gen_arith_bin
1659 * @opflclass logical
1660 */
1661FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1662{
1663 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1664 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1665 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1666}
1667
1668
1669/**
1670 * @opcode 0x36
1671 * @opmnemonic SEG
1672 * @op1 SS
1673 * @opgroup og_prefix
1674 * @openc prefix
1675 * @opdisenum OP_SEG
1676 * @ophints harmless
1677 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* 0x36: SS segment-override prefix — record the prefix, set the effective
       segment, then decode and dispatch the following opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1687
1688
1689/**
1690 * @opcode 0x37
1691 * @opfltest af
1692 * @opflmodify cf,pf,af,zf,sf,of
1693 * @opflundef pf,zf,sf,of
1694 * @opgroup og_gen_arith_dec
1695 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1696 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1697 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1698 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1699 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1700 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1701 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1702 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1703 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1704 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1705 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1706 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1707 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1708 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1709 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1710 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1711 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1712 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1713 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1714 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1715 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1716 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1717 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1718 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1719 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1720 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1721 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1722 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1723 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1724 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1725 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1726 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* 0x37: AAA — ASCII adjust AL after addition; invalid in 64-bit mode.
       Deferred to C impl; touches xAX and the status flags, OF is undefined. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aaa);
}
1736
1737
1738/**
1739 * @opcode 0x38
1740 * @opflclass arithmetic
1741 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* 0x38: CMP Eb,Gb — read-only (RO) body: the destination is not written back. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_cmp_u8, cmp, 0, 0);
}
1747
1748
1749/**
1750 * @opcode 0x39
1751 * @opflclass arithmetic
1752 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* 0x39: CMP Ev,Gv — read-only (RO) body: the destination is not written back. */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, cmp, 0, 0);
}
1758
1759
1760/**
1761 * @opcode 0x3a
1762 * @opflclass arithmetic
1763 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* 0x3a: CMP Gb,Eb — register "destination" form (flags only). */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8, cmp, 0);
}
1769
1770
1771/**
1772 * @opcode 0x3b
1773 * @opflclass arithmetic
1774 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* 0x3b: CMP Gv,Ev — note the first flag arg is 0 here where ADD et al. pass 1,
       presumably the no-write-back indicator since CMP discards the result. */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0, 0, cmp, 0);
}
1781
1782
1783/**
1784 * @opcode 0x3c
1785 * @opflclass arithmetic
1786 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* 0x3c: CMP AL,Ib — fixed-register byte compare with immediate. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1792
1793
1794/**
1795 * @opcode 0x3d
1796 * @opflclass arithmetic
1797 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* 0x3d: CMP rAX,Iz — last arg 0 here where ADD et al. pass 1 (CMP does not
       write the destination). */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1803
1804
1805/**
1806 * @opcode 0x3e
1807 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* 0x3e: DS segment-override prefix — record the prefix, set the effective
       segment, then decode and dispatch the following opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1817
1818
1819/**
1820 * @opcode 0x3f
1821 * @opfltest af
1822 * @opflmodify cf,pf,af,zf,sf,of
1823 * @opflundef pf,zf,sf,of
1824 * @opgroup og_gen_arith_dec
1825 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1826 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1827 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1828 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1829 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1830 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1831 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1832 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1833 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1834 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1835 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1836 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1837 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1838 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1839 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1840 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1841 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1842 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1843 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1844 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1845 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1846 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1847 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1848 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1849 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1850 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1851 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1852 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1853 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1854 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1855 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1856 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1857 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1858 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1859 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1860 */
1861FNIEMOP_DEF(iemOp_aas)
1862{
1863 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1864 IEMOP_HLP_NO_64BIT();
1865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1866 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1867
1868 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aas);
1869}
1870
1871
1872/**
1873 * Common 'inc/dec register' helper.
1874 *
1875 * Not for 64-bit code, only for what became the rex prefixes.
1876 */
#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
    /* Only 16- and 32-bit operand sizes exist here: in 64-bit mode these */ \
    /* opcodes (0x40..0x4f) are REX prefixes, handled by the callers. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *,      pu16Dst, 0); \
            IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
            IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *,      pu32Dst, 0); \
            IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
            IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
            IEM_MC_CLEAR_HIGH_GREG_U64(a_iReg); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
1907
1908/**
1909 * @opcode 0x40
1910 * @opflclass incdec
1911 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Bare REX (0x40): record the prefix and restart with the next opcode byte. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1929
1930
1931/**
1932 * @opcode 0x41
1933 * @opflclass incdec
1934 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.B (0x41): extends the ModRM r/m (and base/opcode-reg) field by bit 3. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1953
1954
1955/**
1956 * @opcode 0x42
1957 * @opflclass incdec
1958 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.X (0x42): extends the SIB index register field by bit 3. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1977
1978
1979
1980/**
1981 * @opcode 0x43
1982 * @opflclass incdec
1983 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.BX (0x43): extends both the r/m (B) and SIB index (X) fields. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
2003
2004
2005/**
2006 * @opcode 0x44
2007 * @opflclass incdec
2008 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.R (0x44): extends the ModRM reg field by bit 3. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
2027
2028
2029/**
2030 * @opcode 0x45
2031 * @opflclass incdec
2032 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RB (0x45): extends the reg (R) and r/m (B) fields. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
2052
2053
2054/**
2055 * @opcode 0x46
2056 * @opflclass incdec
2057 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RX (0x46): extends the reg (R) and SIB index (X) fields. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
2077
2078
2079/**
2080 * @opcode 0x47
2081 * @opflclass incdec
2082 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RBX (0x47): extends the reg (R), r/m (B) and SIB index (X) fields. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
2103
2104
2105/**
2106 * @opcode 0x48
2107 * @opflclass incdec
2108 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.W (0x48): selects 64-bit operand size, so re-derive the effective size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
}
2127
2128
2129/**
2130 * @opcode 0x49
2131 * @opflclass incdec
2132 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.BW (0x49): B extends r/m; W selects 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
}
2152
2153
2154/**
2155 * @opcode 0x4a
2156 * @opflclass incdec
2157 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.XW (0x4a): X extends SIB index; W selects 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
}
2177
2178
2179/**
2180 * @opcode 0x4b
2181 * @opflclass incdec
2182 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.BXW (0x4b): B extends r/m, X extends SIB index, W gives 64-bit operands. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
}
2203
2204
2205/**
2206 * @opcode 0x4c
2207 * @opflclass incdec
2208 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RW (0x4c): R extends the reg field; W gives 64-bit operands. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
}
2228
2229
2230/**
2231 * @opcode 0x4d
2232 * @opflclass incdec
2233 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RBW (0x4d): R extends reg, B extends r/m, W gives 64-bit operands. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
}
2254
2255
2256/**
2257 * @opcode 0x4e
2258 * @opflclass incdec
2259 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RXW (0x4e): R extends reg, X extends SIB index, W gives 64-bit operands. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
}
2280
2281
2282/**
2283 * @opcode 0x4f
2284 * @opflclass incdec
2285 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.RBXW (0x4f): all three register-extension bits plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
}
2307
2308
2309/**
2310 * Common 'push register' helper.
2311 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* In 64-bit mode REX.B extends the register index, and PUSH defaults to
           64-bit operands (a 66h prefix selects 16-bit; there is no 32-bit form). */
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* Fetch the register value and push it at the effective operand size. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2356
2357
2358/**
2359 * @opcode 0x50
2360 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    /* All the work is in the common push-register helper. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
2366
2367
2368/**
2369 * @opcode 0x51
2370 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    /* All the work is in the common push-register helper. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
2376
2377
2378/**
2379 * @opcode 0x52
2380 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    /* All the work is in the common push-register helper. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
2386
2387
2388/**
2389 * @opcode 0x53
2390 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    /* All the work is in the common push-register helper. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
2396
2397
2398/**
2399 * @opcode 0x54
2400 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) != IEMTARGETCPU_8086)
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);

    /* 8086 works differently wrt to 'push sp' compared to 80186 and later:
       it pushes the value of SP *after* the decrement, hence the SUB below. */
    IEM_MC_BEGIN(0, 1, IEM_MC_F_ONLY_8086, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Value);
    IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
    IEM_MC_SUB_LOCAL_U16(u16Value, 2);
    IEM_MC_PUSH_U16(u16Value);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
2417
2418
2419/**
2420 * @opcode 0x55
2421 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    /* All the work is in the common push-register helper. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
2427
2428
2429/**
2430 * @opcode 0x56
2431 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    /* All the work is in the common push-register helper. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
2437
2438
2439/**
2440 * @opcode 0x57
2441 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    /* All the work is in the common push-register helper. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
2447
2448
2449/**
2450 * Common 'pop register' helper.
2451 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* In 64-bit mode REX.B extends the register index, and POP defaults to
           64-bit operands (a 66h prefix selects 16-bit; there is no 32-bit form). */
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* Pop directly into the destination register at the effective operand size. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U16(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U32(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U64(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2490
2491
2492/**
2493 * @opcode 0x58
2494 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    /* All the work is in the common pop-register helper. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
2500
2501
2502/**
2503 * @opcode 0x59
2504 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    /* All the work is in the common pop-register helper. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
2510
2511
2512/**
2513 * @opcode 0x5a
2514 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    /* All the work is in the common pop-register helper. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
2520
2521
2522/**
2523 * @opcode 0x5b
2524 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    /* All the work is in the common pop-register helper. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
2530
2531
2532/**
2533 * @opcode 0x5c
2534 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    /* 'pop sp' ordering quirks are presumably handled inside the common helper /
       IEM_MC_POP_GREG machinery — confirm against the MC implementation. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
}
2540
2541
2542/**
2543 * @opcode 0x5d
2544 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    /* All the work is in the common pop-register helper. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
2550
2551
2552/**
2553 * @opcode 0x5e
2554 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    /* All the work is in the common pop-register helper. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
2560
2561
2562/**
2563 * @opcode 0x5f
2564 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    /* All the work is in the common pop-register helper. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
2570
2571
2572/**
2573 * @opcode 0x60
2574 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    /* Defer to the C implementation; only SP is written back by the decoder's view
       (the eight registers are read and pushed, not modified). */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_32);
}
2585
2586
2587/**
2588 * @opcode 0x61
2589 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        /* POPA writes all eight GPRs, so declare every one as modified for the
           native recompiler. */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                        iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                    iemCImpl_popa_32);
    }
    /* In 64-bit mode 0x61 would be the MVEX prefix (Knights Corner); not supported. */
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
2624
2625
2626/**
2627 * @opcode 0x62
2628 * @opmnemonic bound
2629 * @op1 Gv_RO
2630 * @op2 Ma
2631 * @opmincpu 80186
2632 * @ophints harmless x86_invalid_64
2633 * @optest op1=0 op2=0 ->
2634 * @optest op1=1 op2=0 -> value.xcpt=5
2635 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2636 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2637 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2638 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2639 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2640 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2641 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2642 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2643 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2644 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2645 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2646 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2647 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2648 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2649 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2650 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2651 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2652 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2653 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2654 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2655 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2656 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2657 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2658 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2659 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2660 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2661 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2662 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2663 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2664 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2665 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2666 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2667 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2668 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2669 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2670 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2671 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2672 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2673 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2674 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2675 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2676 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2677 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /** @todo testcase: check that there are two memory accesses involved. Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint16_t, u16Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at [mem], upper bound at [mem+2]. */
                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint32_t, u32Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at [mem], upper bound at [mem+4]. */
                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        /* MOD=3 in legacy/compat mode: either start of an EVEX prefix or #UD. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX decoding: consume the remaining two payload bytes; not implemented yet. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2765
2766
2767/**
2768 * @opcode 0x63
2769 * @opflmodify zf
2770 * @note non-64-bit modes.
2771 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: adjust RPL of Ew in place via the assembly worker. */
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t *,      pu16Dst, 0);
        IEM_MC_ARG(uint16_t,        u16Src,  1);
        IEM_MC_ARG(uint32_t *,      pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map it read-write, run the worker, commit result + flags. */
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst,           0);
        IEM_MC_ARG(uint16_t,   u16Src,            1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,  2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,  bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2819
2820
2821/**
2822 * @opcode 0x63
2823 *
2824 * @note This is a weird one. It works like a regular move instruction if
2825 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2826 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register: sign-extend the 32-bit source into the 64-bit dest.
             */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory (32-bit read, sign-extended to 64).
             */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        /* Without REX.W the instruction behaves like a plain 16/32-bit MOV per the
           note above; that path is not implemented yet (see @todo). */
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
2868
2869
2870/**
2871 * @opcode 0x64
2872 * @opmnemonic segfs
2873 * @opmincpu 80386
2874 * @opgroup og_prefixes
2875 */
2876FNIEMOP_DEF(iemOp_seg_FS)
2877{
2878 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
2879 IEMOP_HLP_MIN_386();
2880
 /* Record the FS segment override and make it the effective segment. */
2881 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
2882 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
2883
 /* A prefix byte is not an instruction by itself: fetch and dispatch the
 next opcode byte with the updated decoder state. */
2884 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2885 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2886}
2887
2888
2889/**
2890 * @opcode 0x65
2891 * @opmnemonic seggs
2892 * @opmincpu 80386
2893 * @opgroup og_prefixes
2894 */
2895FNIEMOP_DEF(iemOp_seg_GS)
2896{
2897 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
2898 IEMOP_HLP_MIN_386();
2899
 /* Record the GS segment override and make it the effective segment. */
2900 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
2901 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
2902
 /* A prefix byte is not an instruction by itself: fetch and dispatch the
 next opcode byte with the updated decoder state. */
2903 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2904 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2905}
2906
2907
2908/**
2909 * @opcode 0x66
2910 * @opmnemonic opsize
2911 * @openc prefix
2912 * @opmincpu 80386
2913 * @ophints harmless
2914 * @opgroup og_prefixes
2915 */
2916FNIEMOP_DEF(iemOp_op_size)
2917{
2918 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
2919 IEMOP_HLP_MIN_386();
2920
 /* Record the operand-size override and recompute the effective operand size. */
2921 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
2922 iemRecalEffOpSize(pVCpu);
2923
2924 /* For the 4 entry opcode tables, the operand prefix doesn't count
2925 when REPZ or REPNZ are present. */
2926 if (pVCpu->iem.s.idxPrefix == 0)
2927 pVCpu->iem.s.idxPrefix = 1;
2928
 /* Fetch and dispatch the next opcode byte with the updated decoder state. */
2929 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2930 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2931}
2932
2933
2934/**
2935 * @opcode 0x67
2936 * @opmnemonic addrsize
2937 * @openc prefix
2938 * @opmincpu 80386
2939 * @ophints harmless
2940 * @opgroup og_prefixes
2941 */
2942FNIEMOP_DEF(iemOp_addr_size)
2943{
2944 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
2945 IEMOP_HLP_MIN_386();
2946
 /* Toggle the effective address size relative to the default: 16 <-> 32 in
 legacy/compat modes, 64 -> 32 in long mode (64-bit cannot shrink to 16). */
2947 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
2948 switch (pVCpu->iem.s.enmDefAddrMode)
2949 {
2950 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2951 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
2952 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2953 default: AssertFailed();
2954 }
2955
 /* Fetch and dispatch the next opcode byte with the updated decoder state. */
2956 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2957 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2958}
2959
2960
2961/**
2962 * @opcode 0x68
2963 */
2964FNIEMOP_DEF(iemOp_push_Iz)
2965{
2966 IEMOP_MNEMONIC(push_Iz, "push Iz");
2967 IEMOP_HLP_MIN_186();
2968 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
 /* Push an immediate of the effective operand size (Iz = 16/32-bit imm). */
2969 switch (pVCpu->iem.s.enmEffOpSize)
2970 {
2971 case IEMMODE_16BIT:
2972 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
2973 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2975 IEM_MC_LOCAL_CONST(uint16_t, u16Value, u16Imm);
2976 IEM_MC_PUSH_U16(u16Value);
2977 IEM_MC_ADVANCE_RIP_AND_FINISH();
2978 IEM_MC_END();
2979 break;
2980
2981 case IEMMODE_32BIT:
2982 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2983 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2984 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2985 IEM_MC_LOCAL_CONST(uint32_t, u32Value, u32Imm);
2986 IEM_MC_PUSH_U32(u32Value);
2987 IEM_MC_ADVANCE_RIP_AND_FINISH();
2988 IEM_MC_END();
2989 break;
2990
 /* 64-bit: the immediate is 32 bits, sign-extended to 64 before the push. */
2991 case IEMMODE_64BIT:
2992 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2993 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2995 IEM_MC_LOCAL_CONST(uint64_t, u64Value, u64Imm);
2996 IEM_MC_PUSH_U64(u64Value);
2997 IEM_MC_ADVANCE_RIP_AND_FINISH();
2998 IEM_MC_END();
2999 break;
3000
3001 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3002 }
3003}
3004
3005
3006/**
3007 * @opcode 0x69
3008 * @opflclass multiply
3009 */
3010FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
3011{
3012 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
3013 IEMOP_HLP_MIN_186();
3014 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 /* SF, ZF, AF and PF are architecturally undefined after IMUL. */
3015 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3016
 /* One IEM_MC block per effective operand size; each fetches Ev (register
 or memory), multiplies by the immediate via the CPU-behavior-selected
 worker, and stores the truncated product in Gv. */
3017 switch (pVCpu->iem.s.enmEffOpSize)
3018 {
3019 case IEMMODE_16BIT:
3020 {
3021 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
3022 if (IEM_IS_MODRM_REG_MODE(bRm))
3023 {
3024 /* register operand */
3025 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3026 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
3027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3028 IEM_MC_LOCAL(uint16_t, u16Tmp);
3029 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3030 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3031 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
3032 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3033 IEM_MC_REF_EFLAGS(pEFlags);
3034 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3035 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3036
3037 IEM_MC_ADVANCE_RIP_AND_FINISH();
3038 IEM_MC_END();
3039 }
3040 else
3041 {
3042 /* memory operand */
3043 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
3044 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3045 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
3046
3047 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3048 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3049
3050 IEM_MC_LOCAL(uint16_t, u16Tmp);
3051 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3052
3053 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3054 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
3055 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3056 IEM_MC_REF_EFLAGS(pEFlags);
3057 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3058 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3059
3060 IEM_MC_ADVANCE_RIP_AND_FINISH();
3061 IEM_MC_END();
3062 }
3063 break;
3064 }
3065
3066 case IEMMODE_32BIT:
3067 {
3068 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
3069 if (IEM_IS_MODRM_REG_MODE(bRm))
3070 {
3071 /* register operand */
3072 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3073 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
3074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3075 IEM_MC_LOCAL(uint32_t, u32Tmp);
3076 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3077
3078 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3079 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
3080 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3081 IEM_MC_REF_EFLAGS(pEFlags);
3082 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3083 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3084
3085 IEM_MC_ADVANCE_RIP_AND_FINISH();
3086 IEM_MC_END();
3087 }
3088 else
3089 {
3090 /* memory operand */
3091 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
3092 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3093 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3094
3095 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3097
3098 IEM_MC_LOCAL(uint32_t, u32Tmp);
3099 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3100
3101 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3102 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
3103 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3104 IEM_MC_REF_EFLAGS(pEFlags);
3105 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3106 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3107
3108 IEM_MC_ADVANCE_RIP_AND_FINISH();
3109 IEM_MC_END();
3110 }
3111 break;
3112 }
3113
 /* 64-bit: Iz is still a 32-bit immediate, sign-extended to 64 bits. */
3114 case IEMMODE_64BIT:
3115 {
3116 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3117 if (IEM_IS_MODRM_REG_MODE(bRm))
3118 {
3119 /* register operand */
3120 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3121 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
3122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3123 IEM_MC_LOCAL(uint64_t, u64Tmp);
3124 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3125
3126 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3127 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
3128 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3129 IEM_MC_REF_EFLAGS(pEFlags);
3130 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3131 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3132
3133 IEM_MC_ADVANCE_RIP_AND_FINISH();
3134 IEM_MC_END();
3135 }
3136 else
3137 {
3138 /* memory operand */
3139 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3140 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3141 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3142
3143 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
3144 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* parameter count for the threaded function for this block. */
3145
3146 IEM_MC_LOCAL(uint64_t, u64Tmp);
3147 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3148
3149 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3150 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int32_t)u32Imm, 1);
3151 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3152 IEM_MC_REF_EFLAGS(pEFlags);
3153 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3154 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3155
3156 IEM_MC_ADVANCE_RIP_AND_FINISH();
3157 IEM_MC_END();
3158 }
3159 break;
3160 }
3161
3162 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3163 }
3164}
3165
3166
3167/**
3168 * @opcode 0x6a
3169 */
3170FNIEMOP_DEF(iemOp_push_Ib)
3171{
3172 IEMOP_MNEMONIC(push_Ib, "push Ib");
3173 IEMOP_HLP_MIN_186();
3174 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3175 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3176
 /* The byte immediate is sign-extended to the effective operand size
 before being pushed (see the casts below). */
3177 switch (pVCpu->iem.s.enmEffOpSize)
3178 {
3179 case IEMMODE_16BIT:
3180 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_186, 0);
3181 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3182 IEM_MC_LOCAL_CONST(uint16_t, uValue, (int16_t)i8Imm);
3183 IEM_MC_PUSH_U16(uValue);
3184 IEM_MC_ADVANCE_RIP_AND_FINISH();
3185 IEM_MC_END();
3186 break;
3187 case IEMMODE_32BIT:
3188 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
3189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3190 IEM_MC_LOCAL_CONST(uint32_t, uValue, (int32_t)i8Imm);
3191 IEM_MC_PUSH_U32(uValue);
3192 IEM_MC_ADVANCE_RIP_AND_FINISH();
3193 IEM_MC_END();
3194 break;
3195 case IEMMODE_64BIT:
3196 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
3197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3198 IEM_MC_LOCAL_CONST(uint64_t, uValue, (int64_t)i8Imm);
3199 IEM_MC_PUSH_U64(uValue);
3200 IEM_MC_ADVANCE_RIP_AND_FINISH();
3201 IEM_MC_END();
3202 break;
3203 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3204 }
3205}
3206
3207
3208/**
3209 * @opcode 0x6b
3210 * @opflclass multiply
3211 */
3212FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
3213{
3214 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
3215 IEMOP_HLP_MIN_186();
3216 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 /* SF, ZF, AF and PF are architecturally undefined after IMUL. */
3217 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3218
 /* Same structure as iemOp_imul_Gv_Ev_Iz, but the immediate is a single
 byte that gets sign-extended to the effective operand size. */
3219 switch (pVCpu->iem.s.enmEffOpSize)
3220 {
3221 case IEMMODE_16BIT:
3222 {
3223 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
3224 if (IEM_IS_MODRM_REG_MODE(bRm))
3225 {
3226 /* register operand */
3227 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
3228 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3230
3231 IEM_MC_LOCAL(uint16_t, u16Tmp);
3232 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3233
3234 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3235 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
3236 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3237 IEM_MC_REF_EFLAGS(pEFlags);
3238 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3239 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3240
3241 IEM_MC_ADVANCE_RIP_AND_FINISH();
3242 IEM_MC_END();
3243 }
3244 else
3245 {
3246 /* memory operand */
3247 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
3248
3249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3250 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3251
3252 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
3253 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3254
3255 IEM_MC_LOCAL(uint16_t, u16Tmp);
3256 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3257
3258 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3259 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
3260 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3261 IEM_MC_REF_EFLAGS(pEFlags);
3262 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3263 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3264
3265 IEM_MC_ADVANCE_RIP_AND_FINISH();
3266 IEM_MC_END();
3267 }
3268 break;
3269 }
3270
3271 case IEMMODE_32BIT:
3272 {
3273 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
3274 if (IEM_IS_MODRM_REG_MODE(bRm))
3275 {
3276 /* register operand */
3277 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3278 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
3279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3280 IEM_MC_LOCAL(uint32_t, u32Tmp);
3281 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3282
3283 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3284 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
3285 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3286 IEM_MC_REF_EFLAGS(pEFlags);
3287 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3288 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3289
3290 IEM_MC_ADVANCE_RIP_AND_FINISH();
3291 IEM_MC_END();
3292 }
3293 else
3294 {
3295 /* memory operand */
3296 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
3297 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3298 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3299
3300 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
3301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3302
3303 IEM_MC_LOCAL(uint32_t, u32Tmp);
3304 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3305
3306 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3307 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
3308 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3309 IEM_MC_REF_EFLAGS(pEFlags);
3310 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3311 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3312
3313 IEM_MC_ADVANCE_RIP_AND_FINISH();
3314 IEM_MC_END();
3315 }
3316 break;
3317 }
3318
3319 case IEMMODE_64BIT:
3320 {
3321 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3322 if (IEM_IS_MODRM_REG_MODE(bRm))
3323 {
3324 /* register operand */
3325 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3326 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
3327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3328 IEM_MC_LOCAL(uint64_t, u64Tmp);
3329 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3330
3331 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3332 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
3333 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3334 IEM_MC_REF_EFLAGS(pEFlags);
3335 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3336 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3337
3338 IEM_MC_ADVANCE_RIP_AND_FINISH();
3339 IEM_MC_END();
3340 }
3341 else
3342 {
3343 /* memory operand */
3344 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3345 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3347
3348 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the threaded parameter count. */
3349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3350
3351 IEM_MC_LOCAL(uint64_t, u64Tmp);
3352 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3353
3354 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3355 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
3356 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3357 IEM_MC_REF_EFLAGS(pEFlags);
3358 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3359 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3360
3361 IEM_MC_ADVANCE_RIP_AND_FINISH();
3362 IEM_MC_END();
3363 }
3364 break;
3365 }
3366
3367 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3368 }
3369}
3370
3371
3372/**
3373 * @opcode 0x6c
3374 * @opfltest iopl,df
3375 */
3376FNIEMOP_DEF(iemOp_insb_Yb_DX)
3377{
3378 IEMOP_HLP_MIN_186();
3379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* Defer to a C implementation, selected by REP prefix and effective
 address size. The register mask passed to the deferral macro lists the
 guest GPRs the worker updates (xDI, plus xCX for the REP variants) —
 NOTE(review): presumably so the native recompiler flushes them; confirm. */
3380 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3381 {
3382 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
3383 switch (pVCpu->iem.s.enmEffAddrMode)
3384 {
3385 case IEMMODE_16BIT:
3386 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3387 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3388 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3389 iemCImpl_rep_ins_op8_addr16, false);
3390 case IEMMODE_32BIT:
3391 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3392 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3393 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3394 iemCImpl_rep_ins_op8_addr32, false);
3395 case IEMMODE_64BIT:
3396 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3397 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3398 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3399 iemCImpl_rep_ins_op8_addr64, false);
3400 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3401 }
3402 }
3403 else
3404 {
3405 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
3406 switch (pVCpu->iem.s.enmEffAddrMode)
3407 {
3408 case IEMMODE_16BIT:
3409 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3410 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3411 iemCImpl_ins_op8_addr16, false);
3412 case IEMMODE_32BIT:
3413 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3414 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3415 iemCImpl_ins_op8_addr32, false);
3416 case IEMMODE_64BIT:
3417 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3418 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3419 iemCImpl_ins_op8_addr64, false);
3420 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3421 }
3422 }
3423}
3424
3425
3426/**
3427 * @opcode 0x6d
3428 * @opfltest iopl,df
3429 */
3430FNIEMOP_DEF(iemOp_inswd_Yv_DX)
3431{
3432 IEMOP_HLP_MIN_186();
3433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* Word/dword INS: dispatch on REP prefix, effective operand size and
 effective address size to the matching C implementation. The 64-bit
 operand size falls through to the 32-bit case (no 64-bit I/O ports). */
3434 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3435 {
3436 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
3437 switch (pVCpu->iem.s.enmEffOpSize)
3438 {
3439 case IEMMODE_16BIT:
3440 switch (pVCpu->iem.s.enmEffAddrMode)
3441 {
3442 case IEMMODE_16BIT:
3443 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3444 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3445 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3446 iemCImpl_rep_ins_op16_addr16, false);
3447 case IEMMODE_32BIT:
3448 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3449 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3450 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3451 iemCImpl_rep_ins_op16_addr32, false);
3452 case IEMMODE_64BIT:
3453 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3454 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3455 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3456 iemCImpl_rep_ins_op16_addr64, false);
3457 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3458 }
3459 break;
3460 case IEMMODE_64BIT:
3461 case IEMMODE_32BIT:
3462 switch (pVCpu->iem.s.enmEffAddrMode)
3463 {
3464 case IEMMODE_16BIT:
3465 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3466 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3467 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3468 iemCImpl_rep_ins_op32_addr16, false);
3469 case IEMMODE_32BIT:
3470 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3471 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3472 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3473 iemCImpl_rep_ins_op32_addr32, false);
3474 case IEMMODE_64BIT:
3475 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3476 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3477 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3478 iemCImpl_rep_ins_op32_addr64, false);
3479 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3480 }
3481 break;
3482 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3483 }
3484 }
3485 else
3486 {
3487 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
3488 switch (pVCpu->iem.s.enmEffOpSize)
3489 {
3490 case IEMMODE_16BIT:
3491 switch (pVCpu->iem.s.enmEffAddrMode)
3492 {
3493 case IEMMODE_16BIT:
3494 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3495 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3496 iemCImpl_ins_op16_addr16, false);
3497 case IEMMODE_32BIT:
3498 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3499 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3500 iemCImpl_ins_op16_addr32, false);
3501 case IEMMODE_64BIT:
3502 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3503 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3504 iemCImpl_ins_op16_addr64, false);
3505 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3506 }
3507 break;
3508 case IEMMODE_64BIT:
3509 case IEMMODE_32BIT:
3510 switch (pVCpu->iem.s.enmEffAddrMode)
3511 {
3512 case IEMMODE_16BIT:
3513 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3514 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3515 iemCImpl_ins_op32_addr16, false);
3516 case IEMMODE_32BIT:
3517 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3518 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3519 iemCImpl_ins_op32_addr32, false);
3520 case IEMMODE_64BIT:
3521 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3522 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3523 iemCImpl_ins_op32_addr64, false);
3524 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3525 }
3526 break;
3527 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3528 }
3529 }
3530}
3531
3532
3533/**
3534 * @opcode 0x6e
3535 * @opfltest iopl,df
3536 */
3537FNIEMOP_DEF(iemOp_outsb_Yb_DX)
3538{
3539 IEMOP_HLP_MIN_186();
3540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* Byte OUTS: dispatch on REP prefix and effective address size. Unlike
 INS, the source segment can be overridden, so iEffSeg is passed along;
 the worker updates xSI (and xCX for the REP variants). */
3541 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3542 {
3543 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
3544 switch (pVCpu->iem.s.enmEffAddrMode)
3545 {
3546 case IEMMODE_16BIT:
3547 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3548 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3549 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3550 iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3551 case IEMMODE_32BIT:
3552 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3553 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3554 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3555 iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3556 case IEMMODE_64BIT:
3557 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3558 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3559 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3560 iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3561 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3562 }
3563 }
3564 else
3565 {
3566 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
3567 switch (pVCpu->iem.s.enmEffAddrMode)
3568 {
3569 case IEMMODE_16BIT:
3570 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3571 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3572 iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3573 case IEMMODE_32BIT:
3574 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3575 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3576 iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3577 case IEMMODE_64BIT:
3578 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3579 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3580 iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3581 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3582 }
3583 }
3584}
3585
3586
3587/**
3588 * @opcode 0x6f
3589 * @opfltest iopl,df
3590 */
3591FNIEMOP_DEF(iemOp_outswd_Yv_DX)
3592{
3593 IEMOP_HLP_MIN_186();
3594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* Word/dword OUTS: dispatch on REP prefix, effective operand size and
 effective address size. The 64-bit operand size falls through to the
 32-bit case (no 64-bit I/O ports); iEffSeg is passed as source segment. */
3595 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3596 {
3597 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
3598 switch (pVCpu->iem.s.enmEffOpSize)
3599 {
3600 case IEMMODE_16BIT:
3601 switch (pVCpu->iem.s.enmEffAddrMode)
3602 {
3603 case IEMMODE_16BIT:
3604 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3605 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3606 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3607 iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3608 case IEMMODE_32BIT:
3609 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3610 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3611 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3612 iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3613 case IEMMODE_64BIT:
3614 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3615 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3616 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3617 iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3618 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3619 }
3620 break;
3621 case IEMMODE_64BIT:
3622 case IEMMODE_32BIT:
3623 switch (pVCpu->iem.s.enmEffAddrMode)
3624 {
3625 case IEMMODE_16BIT:
3626 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3627 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3628 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3629 iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3630 case IEMMODE_32BIT:
3631 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3632 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3633 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3634 iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3635 case IEMMODE_64BIT:
3636 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3637 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3638 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3639 iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3640 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3641 }
3642 break;
3643 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3644 }
3645 }
3646 else
3647 {
3648 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
3649 switch (pVCpu->iem.s.enmEffOpSize)
3650 {
3651 case IEMMODE_16BIT:
3652 switch (pVCpu->iem.s.enmEffAddrMode)
3653 {
3654 case IEMMODE_16BIT:
3655 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3656 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3657 iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3658 case IEMMODE_32BIT:
3659 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3660 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3661 iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3662 case IEMMODE_64BIT:
3663 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3664 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3665 iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3666 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3667 }
3668 break;
3669 case IEMMODE_64BIT:
3670 case IEMMODE_32BIT:
3671 switch (pVCpu->iem.s.enmEffAddrMode)
3672 {
3673 case IEMMODE_16BIT:
3674 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3675 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3676 iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3677 case IEMMODE_32BIT:
3678 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3679 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3680 iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3681 case IEMMODE_64BIT:
3682 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3683 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3684 iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3685 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3686 }
3687 break;
3688 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3689 }
3690 }
3691}
3692
3693
3694/**
3695 * @opcode 0x70
3696 * @opfltest of
3697 */
3698FNIEMOP_DEF(iemOp_jo_Jb)
3699{
3700 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
3701 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3702 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3703
 /* Short jump taken when OF is set; otherwise fall through to the next
 instruction. */
3704 IEM_MC_BEGIN(0, 0, 0, 0);
3705 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3706 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3707 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3708 } IEM_MC_ELSE() {
3709 IEM_MC_ADVANCE_RIP_AND_FINISH();
3710 } IEM_MC_ENDIF();
3711 IEM_MC_END();
3712}
3713
3714
3715/**
3716 * @opcode 0x71
3717 * @opfltest of
3718 */
3719FNIEMOP_DEF(iemOp_jno_Jb)
3720{
3721 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
3722 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3723 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3724
 /* Inverted condition: the test is on OF set, so the jump sits in the
 ELSE arm - taken when OF is clear. */
3725 IEM_MC_BEGIN(0, 0, 0, 0);
3726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3727 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3728 IEM_MC_ADVANCE_RIP_AND_FINISH();
3729 } IEM_MC_ELSE() {
3730 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3731 } IEM_MC_ENDIF();
3732 IEM_MC_END();
3733}
3734
3735/**
3736 * @opcode 0x72
3737 * @opfltest cf
3738 */
3739FNIEMOP_DEF(iemOp_jc_Jb)
3740{
3741 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
3742 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3743 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3744
 /* Short jump taken when CF is set. */
3745 IEM_MC_BEGIN(0, 0, 0, 0);
3746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3747 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3748 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3749 } IEM_MC_ELSE() {
3750 IEM_MC_ADVANCE_RIP_AND_FINISH();
3751 } IEM_MC_ENDIF();
3752 IEM_MC_END();
3753}
3754
3755
3756/**
3757 * @opcode 0x73
3758 * @opfltest cf
3759 */
3760FNIEMOP_DEF(iemOp_jnc_Jb)
3761{
3762 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
3763 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3764 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3765
 /* Inverted condition: jump sits in the ELSE arm - taken when CF is clear. */
3766 IEM_MC_BEGIN(0, 0, 0, 0);
3767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3768 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3769 IEM_MC_ADVANCE_RIP_AND_FINISH();
3770 } IEM_MC_ELSE() {
3771 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3772 } IEM_MC_ENDIF();
3773 IEM_MC_END();
3774}
3775
3776
3777/**
3778 * @opcode 0x74
3779 * @opfltest zf
3780 */
3781FNIEMOP_DEF(iemOp_je_Jb)
3782{
3783 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
3784 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3785 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3786
 /* Short jump taken when ZF is set. */
3787 IEM_MC_BEGIN(0, 0, 0, 0);
3788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3789 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3790 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3791 } IEM_MC_ELSE() {
3792 IEM_MC_ADVANCE_RIP_AND_FINISH();
3793 } IEM_MC_ENDIF();
3794 IEM_MC_END();
3795}
3796
3797
3798/**
3799 * @opcode 0x75
3800 * @opfltest zf
3801 */
3802FNIEMOP_DEF(iemOp_jne_Jb)
3803{
3804 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
3805 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3806 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3807
 /* Inverted condition: jump sits in the ELSE arm - taken when ZF is clear. */
3808 IEM_MC_BEGIN(0, 0, 0, 0);
3809 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3810 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3811 IEM_MC_ADVANCE_RIP_AND_FINISH();
3812 } IEM_MC_ELSE() {
3813 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3814 } IEM_MC_ENDIF();
3815 IEM_MC_END();
3816}
3817
3818
3819/**
3820 * @opcode 0x76
3821 * @opfltest cf,zf
3822 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    /* JBE/JNA rel8 (0x76): branch when CF or ZF is set (unsigned below-or-equal). */
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3838
3839
3840/**
3841 * @opcode 0x77
3842 * @opfltest cf,zf
3843 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    /* JA/JNBE rel8 (0x77): inverse of JBE — branch when both CF and ZF are clear
       (unsigned above); taken path is in the else arm.  Stats use the 'ja' name. */
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3859
3860
3861/**
3862 * @opcode 0x78
3863 * @opfltest sf
3864 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    /* JS rel8 (0x78): branch by i8Imm when SF is set, else fall through. */
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3880
3881
3882/**
3883 * @opcode 0x79
3884 * @opfltest sf
3885 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    /* JNS rel8 (0x79): inverse of JS — branch when SF is clear (taken path in the else arm). */
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3901
3902
3903/**
3904 * @opcode 0x7a
3905 * @opfltest pf
3906 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    /* JP/JPE rel8 (0x7a): branch by i8Imm when PF is set, else fall through. */
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3922
3923
3924/**
3925 * @opcode 0x7b
3926 * @opfltest pf
3927 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    /* JNP/JPO rel8 (0x7b): inverse of JP — branch when PF is clear (taken path in the else arm). */
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3943
3944
3945/**
3946 * @opcode 0x7c
3947 * @opfltest sf,of
3948 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    /* JL/JNGE rel8 (0x7c): branch when SF != OF (signed less-than). */
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3964
3965
3966/**
3967 * @opcode 0x7d
3968 * @opfltest sf,of
3969 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    /* JGE/JNL rel8 (0x7d): inverse of JL — branch when SF == OF (signed greater-or-equal);
       taken path is in the else arm.  Stats use the 'jge' name. */
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3985
3986
3987/**
3988 * @opcode 0x7e
3989 * @opfltest zf,sf,of
3990 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    /* JLE/JNG rel8 (0x7e): branch when ZF is set or SF != OF (signed less-or-equal). */
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4006
4007
4008/**
4009 * @opcode 0x7f
4010 * @opfltest zf,sf,of
4011 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    /* JG/JNLE rel8 (0x7f): inverse of JLE — branch when ZF is clear and SF == OF
       (signed greater-than); taken path is in the else arm.  Stats use the 'jg' name. */
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
4027
4028
4029/**
4030 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4031 * iemOp_Grp1_Eb_Ib_80.
4032 */
4033#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_fnNormalU8) \
4034 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4035 { \
4036 /* register target */ \
4037 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4038 IEM_MC_BEGIN(3, 0, 0, 0); \
4039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4040 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
4041 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
4042 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4043 \
4044 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4045 IEM_MC_REF_EFLAGS(pEFlags); \
4046 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
4047 \
4048 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4049 IEM_MC_END(); \
4050 } \
4051 else \
4052 { \
4053 /* memory target */ \
4054 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4055 { \
4056 IEM_MC_BEGIN(3, 3, 0, 0); \
4057 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
4058 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4059 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4060 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4061 \
4062 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4063 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4064 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
4065 IEMOP_HLP_DONE_DECODING(); \
4066 \
4067 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4068 IEM_MC_FETCH_EFLAGS(EFlags); \
4069 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
4070 \
4071 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4072 IEM_MC_COMMIT_EFLAGS(EFlags); \
4073 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4074 IEM_MC_END(); \
4075 } \
4076 else \
4077 { \
4078 (void)0
4079
/**
 * Tail for IEMOP_BODY_BINARY_Eb_Ib_RW: the LOCK-prefixed memory variant.
 * Maps the destination byte with atomic semantics and calls the locked
 * worker, then closes the braces left open by the _RW macro.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
4103
/**
 * Read-only variant of IEMOP_BODY_BINARY_Eb_Ib_RW for workers that do not
 * write their r/m operand (CMP): the memory destination is mapped read-only
 * and never committed back.  Like the _RW variant it ends inside an open
 * 'else' and must be completed with IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
4150
/**
 * Tail for IEMOP_BODY_BINARY_Eb_Ib_RO: rejects the LOCK prefix (raises the
 * invalid-lock-prefix exception) since read-only operations like CMP cannot
 * be locked, and closes the braces left open by the _RO macro.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4157
4158
4159
4160/**
4161 * @opmaps grp1_80,grp1_83
4162 * @opcode /0
4163 * @opflclass arithmetic
4164 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
{
    /* ADD r/m8, imm8 — normal worker plus locked worker for LOCK-prefixed memory operands. */
    IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_add_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
}
4171
4172
4173/**
4174 * @opmaps grp1_80,grp1_83
4175 * @opcode /1
4176 * @opflclass logical
4177 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
{
    /* OR r/m8, imm8 — normal worker plus locked worker for LOCK-prefixed memory operands. */
    IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_or_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
}
4184
4185
4186/**
4187 * @opmaps grp1_80,grp1_83
4188 * @opcode /2
4189 * @opflclass arithmetic_carry
4190 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
{
    /* ADC r/m8, imm8 — normal worker plus locked worker for LOCK-prefixed memory operands. */
    IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_adc_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
}
4197
4198
4199/**
4200 * @opmaps grp1_80,grp1_83
4201 * @opcode /3
4202 * @opflclass arithmetic_carry
4203 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
{
    /* SBB r/m8, imm8 — normal worker plus locked worker for LOCK-prefixed memory operands. */
    IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_sbb_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
}
4210
4211
4212/**
4213 * @opmaps grp1_80,grp1_83
4214 * @opcode /4
4215 * @opflclass logical
4216 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
{
    /* AND r/m8, imm8 — normal worker plus locked worker for LOCK-prefixed memory operands. */
    IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_and_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
}
4223
4224
4225/**
4226 * @opmaps grp1_80,grp1_83
4227 * @opcode /5
4228 * @opflclass arithmetic
4229 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
{
    /* SUB r/m8, imm8 — normal worker plus locked worker for LOCK-prefixed memory operands. */
    IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
}
4236
4237
4238/**
4239 * @opmaps grp1_80,grp1_83
4240 * @opcode /6
4241 * @opflclass logical
4242 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
{
    /* XOR r/m8, imm8 — normal worker plus locked worker for LOCK-prefixed memory operands. */
    IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
}
4249
4250
4251/**
4252 * @opmaps grp1_80,grp1_83
4253 * @opcode /7
4254 * @opflclass arithmetic
4255 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
{
    /* CMP r/m8, imm8 — read-only operand, so the LOCK prefix is rejected (NO_LOCK tail). */
    IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
}
4262
4263
4264/**
4265 * @opcode 0x80
4266 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    /* Group 1, opcode 0x80: dispatch on the ModRM.reg field (/0../7) to the Eb,Ib workers. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4283
4284
4285/**
4286 * Body for a group 1 binary operator.
4287 */
4288#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4289 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4290 { \
4291 /* register target */ \
4292 switch (pVCpu->iem.s.enmEffOpSize) \
4293 { \
4294 case IEMMODE_16BIT: \
4295 { \
4296 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4297 IEM_MC_BEGIN(3, 0, 0, 0); \
4298 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4299 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4300 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
4301 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4302 \
4303 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4304 IEM_MC_REF_EFLAGS(pEFlags); \
4305 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4306 \
4307 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4308 IEM_MC_END(); \
4309 break; \
4310 } \
4311 \
4312 case IEMMODE_32BIT: \
4313 { \
4314 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4315 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4317 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4318 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
4319 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4320 \
4321 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4322 IEM_MC_REF_EFLAGS(pEFlags); \
4323 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4324 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4325 \
4326 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4327 IEM_MC_END(); \
4328 break; \
4329 } \
4330 \
4331 case IEMMODE_64BIT: \
4332 { \
4333 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4334 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4336 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4337 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
4338 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4339 \
4340 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4341 IEM_MC_REF_EFLAGS(pEFlags); \
4342 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4343 \
4344 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4345 IEM_MC_END(); \
4346 break; \
4347 } \
4348 \
4349 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4350 } \
4351 } \
4352 else \
4353 { \
4354 /* memory target */ \
4355 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4356 { \
4357 switch (pVCpu->iem.s.enmEffOpSize) \
4358 { \
4359 case IEMMODE_16BIT: \
4360 { \
4361 IEM_MC_BEGIN(3, 3, 0, 0); \
4362 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4363 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4364 \
4365 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4366 IEMOP_HLP_DONE_DECODING(); \
4367 \
4368 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4369 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4370 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4371 \
4372 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
4373 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4374 IEM_MC_FETCH_EFLAGS(EFlags); \
4375 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4376 \
4377 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4378 IEM_MC_COMMIT_EFLAGS(EFlags); \
4379 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4380 IEM_MC_END(); \
4381 break; \
4382 } \
4383 \
4384 case IEMMODE_32BIT: \
4385 { \
4386 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4387 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4388 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4389 \
4390 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4391 IEMOP_HLP_DONE_DECODING(); \
4392 \
4393 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4394 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4395 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4396 \
4397 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
4398 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4399 IEM_MC_FETCH_EFLAGS(EFlags); \
4400 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4401 \
4402 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4403 IEM_MC_COMMIT_EFLAGS(EFlags); \
4404 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4405 IEM_MC_END(); \
4406 break; \
4407 } \
4408 \
4409 case IEMMODE_64BIT: \
4410 { \
4411 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4412 \
4413 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4414 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4415 \
4416 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4417 IEMOP_HLP_DONE_DECODING(); \
4418 \
4419 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4420 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4421 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4422 \
4423 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
4424 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4425 IEM_MC_FETCH_EFLAGS(EFlags); \
4426 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4427 \
4428 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4429 IEM_MC_COMMIT_EFLAGS(EFlags); \
4430 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4431 IEM_MC_END(); \
4432 break; \
4433 } \
4434 \
4435 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4436 } \
4437 } \
4438 else \
4439 { \
4440 (void)0
/* This must be a separate macro due to parsing restrictions in IEMAllInstPython.py. */
/** Tail for IEMOP_BODY_BINARY_Ev_Iz_RW: the LOCK-prefixed memory variant,
 *  mapping the destination atomically and calling the locked workers; closes
 *  the braces left open by the _RW macro. */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src,  u16Imm,       1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,       2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src,  u32Imm,       1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,       2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src,  u64Imm,       1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,       2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4525
/* read-only version */
/** Like IEMOP_BODY_BINARY_Ev_Iz_RW but for workers that only read their r/m
 *  operand (CMP): memory destinations are mapped read-only, and the LOCK
 *  prefix is rejected inline, so no separate _LOCKED tail macro is needed. */
#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst,               0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src,  u16Imm,       1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,       2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst,               0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src,  u32Imm,       1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,       2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst,               0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src,  u64Imm,       1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,       2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4682
4683
4684/**
4685 * @opmaps grp1_81
4686 * @opcode /0
4687 * @opflclass arithmetic
4688 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
{
    /* ADD r/m16/32/64, imm16/32 — normal workers plus locked workers for LOCK-prefixed memory operands. */
    IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
4695
4696
4697/**
4698 * @opmaps grp1_81
4699 * @opcode /1
4700 * @opflclass logical
4701 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
{
    /* OR r/m16/32/64, imm16/32 — normal workers plus locked workers for LOCK-prefixed memory operands. */
    IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
4708
4709
4710/**
4711 * @opmaps grp1_81
4712 * @opcode /2
4713 * @opflclass arithmetic_carry
4714 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
{
    /* ADC r/m16/32/64, imm16/32 — normal workers plus locked workers for LOCK-prefixed memory operands. */
    IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
4721
4722
4723/**
4724 * @opmaps grp1_81
4725 * @opcode /3
4726 * @opflclass arithmetic_carry
4727 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
{
    /* SBB r/m16/32/64, imm16/32 — normal workers plus locked workers for LOCK-prefixed memory operands. */
    IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
4734
4735
4736/**
4737 * @opmaps grp1_81
4738 * @opcode /4
4739 * @opflclass logical
4740 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
{
    /* AND r/m16/32/64, imm16/32 — normal workers plus locked workers for LOCK-prefixed memory operands. */
    IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
4747
4748
4749/**
4750 * @opmaps grp1_81
4751 * @opcode /5
4752 * @opflclass arithmetic
4753 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
{
    /* SUB r/m16/32/64, imm16/32 — normal workers plus locked workers for LOCK-prefixed memory operands. */
    IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
4760
4761
4762/**
4763 * @opmaps grp1_81
4764 * @opcode /6
4765 * @opflclass logical
4766 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
{
    /* XOR r/m16/32/64, imm16/32 — normal workers plus locked workers for LOCK-prefixed memory operands. */
    IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
4773
4774
4775/**
4776 * @opmaps grp1_81
4777 * @opcode /7
4778 * @opflclass arithmetic
4779 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
{
    /* CMP r/m16/32/64, imm16/32 — read-only operand; the _RO body rejects the LOCK prefix itself. */
    IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
4785
4786
4787/**
4788 * @opcode 0x81
4789 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    /* Group 1, opcode 0x81: dispatch on the ModRM.reg field (/0../7) to the Ev,Iz workers. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4806
4807
4808/**
4809 * @opcode 0x82
4810 * @opmnemonic grp1_82
4811 * @opgroup og_groups
4812 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    /* Opcode 0x82: alias of 0x80 that raises #UD in 64-bit mode, then forwards to the 0x80 handler. */
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
4818
4819
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Ev_Ib.
 *
 * The byte immediate is sign-extended to the effective operand size (16, 32
 * or 64 bits) before being handed to the given a_fnNormalUxx worker.  Covers
 * the register-target form and the non-LOCKed memory-target form; the 32-bit
 * register form explicitly clears the high dword of the destination GPR.
 * The LOCK-prefixed memory form is supplied by the companion macro
 * IEMOP_BODY_BINARY_Ev_Ib_LOCKED -- the two macros deliberately expand to a
 * single statement (this one ends in an open 'else {'), split only to work
 * around a parsing issue in IEMAllInstPython.py.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,                    0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,             2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,                    0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,             2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,                    0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,             2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py.
   Continues the open 'else' branch left by IEMOP_BODY_BINARY_Ev_Ib_RW,
   handling the LOCK-prefixed memory-target form via atomic mapping and the
   a_fnLockedUxx workers; must always directly follow the _RW macro. */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,                    0); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,             2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,                    0); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,             2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,                    0); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,             2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
5044
/* read-only variant (used by CMP Ev,Ib): same sign-extended byte immediate
   handling as IEMOP_BODY_BINARY_Ev_Ib_RW, but the memory operand is mapped
   read-only and the LOCK prefix raises \#UD.  Expands to a complete
   statement on its own -- no _LOCKED companion. */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst,                   0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,             2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst,                   0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,             2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst,                   0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,             2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
5191
/**
 * @opmaps grp1_83
 * @opcode /0
 * @opflclass arithmetic
 *
 * ADD Ev,Ib: adds a sign-extended byte immediate to the word/dword/qword
 * destination; the macro pair below expands to the complete body including
 * the LOCK-prefixed memory form.
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
5203
5204
/**
 * @opmaps grp1_83
 * @opcode /1
 * @opflclass logical
 *
 * OR Ev,Ib with sign-extended byte immediate; LOCKable on memory operands.
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
5216
5217
/**
 * @opmaps grp1_83
 * @opcode /2
 * @opflclass arithmetic_carry
 *
 * ADC Ev,Ib with sign-extended byte immediate; LOCKable on memory operands.
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
5229
5230
/**
 * @opmaps grp1_83
 * @opcode /3
 * @opflclass arithmetic_carry
 *
 * SBB Ev,Ib with sign-extended byte immediate; LOCKable on memory operands.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
5242
5243
/**
 * @opmaps grp1_83
 * @opcode /4
 * @opflclass logical
 *
 * AND Ev,Ib with sign-extended byte immediate; LOCKable on memory operands.
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
5255
5256
/**
 * @opmaps grp1_83
 * @opcode /5
 * @opflclass arithmetic
 *
 * SUB Ev,Ib with sign-extended byte immediate; LOCKable on memory operands.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
5268
5269
/**
 * @opmaps grp1_83
 * @opcode /6
 * @opflclass logical
 *
 * XOR Ev,Ib with sign-extended byte immediate; LOCKable on memory operands.
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
5281
5282
/**
 * @opmaps grp1_83
 * @opcode /7
 * @opflclass arithmetic
 *
 * CMP Ev,Ib with sign-extended byte immediate.  Only updates EFLAGS, so the
 * read-only body is used and a LOCK prefix raises \#UD.
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
5293
5294
/**
 * @opcode 0x83
 *
 * Group 1 dispatcher for Ev,Ib: decodes the ModR/M byte and forwards to the
 * handler selected by the reg field (/0../7).
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
             to the 386 even if absent in the intel reference manuals and some
             3rd party opcode listings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5317
5318
/**
 * @opcode 0x84
 * @opflclass logical
 *
 * TEST Eb,Gb: byte AND that only updates EFLAGS (read-only r/m operand).
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_test_u8, test, 0, 0);
}
5329
5330
/**
 * @opcode 0x85
 * @opflclass logical
 *
 * TEST Ev,Gv: word/dword/qword AND that only updates EFLAGS (read-only r/m
 * operand).
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, test, 0, 0);
}
5341
5342
/**
 * @opcode 0x86
 *
 * XCHG Eb,Gb.  For a memory operand the exchange is performed atomically
 * unless IEM_F_X86_DISREGARD_LOCK is set (XCHG with memory is implicitly
 * locked regardless of any LOCK prefix).
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/* Common body for both the atomic and the lock-disregarding variant; a_Style
   selects the mapping/commit flavor (ATOMIC or RW). */
#define IEMOP_XCHG_BYTE(a_fnWorker, a_Style) \
            IEM_MC_BEGIN(2, 4, 0, 0); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            IEM_MC_LOCAL(uint8_t, uTmpReg); \
            IEM_MC_ARG(uint8_t *,           pu8Mem,          0); \
            IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
            IEM_MC_MEM_MAP_U8_##a_Style(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker, pu8Mem, pu8Reg); \
            IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Style(bUnmapInfo); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END()

        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        {
            IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_locked,ATOMIC);
        }
        else
        {
            IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_unlocked,RW);
        }
    }
}
5403
5404
/**
 * @opcode 0x87
 *
 * XCHG Ev,Gv.  Register-register form is a plain double fetch/store; the
 * memory form is performed atomically unless IEM_F_X86_DISREGARD_LOCK is
 * set (XCHG with memory is implicitly locked regardless of a LOCK prefix).
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
/* Common body for both the atomic and the lock-disregarding variant; a_Type
   selects the mapping/commit flavor (ATOMIC or RW). */
#define IEMOP_XCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
            do { \
                switch (pVCpu->iem.s.enmEffOpSize) \
                { \
                    case IEMMODE_16BIT: \
                        IEM_MC_BEGIN(2, 4, 0, 0); \
                        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_LOCAL(uint16_t, uTmpReg); \
                        IEM_MC_ARG(uint16_t *,           pu16Mem,           0); \
                        IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg,  1); \
                        \
                        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                        IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
                        IEM_MC_MEM_MAP_U16_##a_Type(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                        IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker16, pu16Mem, pu16Reg); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                        IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
                        \
                        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                        IEM_MC_END(); \
                        break; \
                    \
                    case IEMMODE_32BIT: \
                        IEM_MC_BEGIN(2, 4, IEM_MC_F_MIN_386, 0); \
                        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_LOCAL(uint32_t, uTmpReg); \
                        IEM_MC_ARG(uint32_t *,           pu32Mem,           0); \
                        IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg,  1); \
                        \
                        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                        IEMOP_HLP_DONE_DECODING(); \
                        IEM_MC_MEM_MAP_U32_##a_Type(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                        IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker32, pu32Mem, pu32Reg); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
                        \
                        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                        IEM_MC_END(); \
                        break; \
                    \
                    case IEMMODE_64BIT: \
                        IEM_MC_BEGIN(2, 4, IEM_MC_F_64BIT, 0); \
                        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_LOCAL(uint64_t, uTmpReg); \
                        IEM_MC_ARG(uint64_t *,           pu64Mem,           0); \
                        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg,  1); \
                        \
                        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                        IEMOP_HLP_DONE_DECODING(); \
                        IEM_MC_MEM_MAP_U64_##a_Type(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                        IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker64, pu64Mem, pu64Reg); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
                        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
                        \
                        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                        IEM_MC_END(); \
                        break; \
                    \
                    IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
                } \
            } while (0)
        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        {
            IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_locked, iemAImpl_xchg_u32_locked, iemAImpl_xchg_u64_locked,ATOMIC);
        }
        else
        {
            IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_unlocked, iemAImpl_xchg_u32_unlocked, iemAImpl_xchg_u64_unlocked,RW);
        }
    }
}
5550
5551
/**
 * @opcode 0x88
 *
 * MOV Eb,Gb: store a byte register into r/m (register or memory); the LOCK
 * prefix is rejected.
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5591
5592
5593/**
5594 * @opcode 0x89
5595 */
5596FNIEMOP_DEF(iemOp_mov_Ev_Gv)
5597{
5598 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
5599
5600 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5601
5602 /*
5603 * If rm is denoting a register, no more instruction bytes.
5604 */
5605 if (IEM_IS_MODRM_REG_MODE(bRm))
5606 {
5607 switch (pVCpu->iem.s.enmEffOpSize)
5608 {
5609 case IEMMODE_16BIT:
5610 IEM_MC_BEGIN(0, 1, 0, 0);
5611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5612 IEM_MC_LOCAL(uint16_t, u16Value);
5613 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5614 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
5615 IEM_MC_ADVANCE_RIP_AND_FINISH();
5616 IEM_MC_END();
5617 break;
5618
5619 case IEMMODE_32BIT:
5620 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
5621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5622 IEM_MC_LOCAL(uint32_t, u32Value);
5623 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5624 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
5625 IEM_MC_ADVANCE_RIP_AND_FINISH();
5626 IEM_MC_END();
5627 break;
5628
5629 case IEMMODE_64BIT:
5630 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5632 IEM_MC_LOCAL(uint64_t, u64Value);
5633 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5634 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
5635 IEM_MC_ADVANCE_RIP_AND_FINISH();
5636 IEM_MC_END();
5637 break;
5638
5639 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5640 }
5641 }
5642 else
5643 {
5644 /*
5645 * We're writing a register to memory.
5646 */
5647 switch (pVCpu->iem.s.enmEffOpSize)
5648 {
5649 case IEMMODE_16BIT:
5650 IEM_MC_BEGIN(0, 2, 0, 0);
5651 IEM_MC_LOCAL(uint16_t, u16Value);
5652 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5653 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5655 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5656 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
5657 IEM_MC_ADVANCE_RIP_AND_FINISH();
5658 IEM_MC_END();
5659 break;
5660
5661 case IEMMODE_32BIT:
5662 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5663 IEM_MC_LOCAL(uint32_t, u32Value);
5664 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5665 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5667 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5668 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
5669 IEM_MC_ADVANCE_RIP_AND_FINISH();
5670 IEM_MC_END();
5671 break;
5672
5673 case IEMMODE_64BIT:
5674 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
5675 IEM_MC_LOCAL(uint64_t, u64Value);
5676 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5677 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5679 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5680 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
5681 IEM_MC_ADVANCE_RIP_AND_FINISH();
5682 IEM_MC_END();
5683 break;
5684
5685 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5686 }
5687 }
5688}
5689
5690
/**
 * @opcode 0x8a
 *
 * MOV Gb,Eb: load a byte register from r/m (register or memory); the LOCK
 * prefix is rejected.
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5729
5730
/**
 * @opcode 0x8b
 *
 * MOV Gv,Ev: load a word/dword/qword register from r/m (register or
 * memory), switching on the effective operand size; the LOCK prefix is
 * rejected.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5827
5828
5829/**
5830 * opcode 0x63
5831 * @todo Table fixme
5832 */
5833FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
5834{
5835 if (!IEM_IS_64BIT_CODE(pVCpu))
5836 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
5837 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
5838 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
5839 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
5840}
5841
5842
5843/**
5844 * @opcode 0x8c
5845 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    if (iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                /* Zero-extending fetch: the selector lands in the low 16 bits,
                   the upper GPR bits are cleared. */
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're saving the register to memory. The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5921
5922
5923
5924
5925/**
5926 * @opcode 0x8d
5927 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /** @todo optimize: This value casting/masking can be skipped if addr-size ==
             *        operand-size, which is usually the case. It'll save an instruction
             *        and a register. */
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /** @todo optimize: This value casting/masking can be skipped if addr-size ==
             *        operand-size, which is usually the case. It'll save an instruction
             *        and a register. */
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            /* No truncation needed here: the effective address already has the
               right width, so it is stored into the destination as-is. */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5980
5981
5982/**
5983 * @opcode 0x8e
5984 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    /** @todo r=bird: What does 8086 do here wrt CS? */
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     *
     * Note! Using IEMOP_MOV_SW_EV_REG_BODY here to specify different
     *       IEM_CIMPL_F_XXX values depending on the CPU mode and target
     *       register. This is a restriction of the current recompiler
     *       approach.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
#define IEMOP_MOV_SW_EV_REG_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 0, 0, a_fCImplFlags); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t,      u16Value,          1); \
            IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                  RT_BIT_64(kIemNativeGstReg_SegSelFirst    + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
                                iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        /* Loading SS inhibits interrupts/debug-traps for one instruction
           (IEM_CIMPL_F_INHIBIT_SHADOW); loading SS/DS/ES in 32-bit code may
           change the execution mode, hence IEM_CIMPL_F_MODE there. */
        if (iSegReg == X86_SREG_SS)
        {
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_REG_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_REG_BODY
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#define IEMOP_MOV_SW_EV_MEM_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 1, 0, a_fCImplFlags); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t,      u16Value,          1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                  RT_BIT_64(kIemNativeGstReg_SegSelFirst    + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
                                iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        /* Same flag selection logic as the register form above. */
        if (iSegReg == X86_SREG_SS)
        {
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_MEM_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_MEM_BODY
    }
}
6097
6098
6099/** Opcode 0x8f /0. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignoring it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calculations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* The 'cb << 8' argument biases rSP by the pop size during the
               effective address calculation (see the note above). */
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR     GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
6230
6231
6232/**
6233 * @opcode 0x8f
6234 */
FNIEMOP_DEF(iemOp_Grp1A__xop)
{
    /*
     * AMD has defined /1 thru /7 as XOP prefix.  The prefix is similar to the
     * three byte VEX prefix, except that the mmmmm field cannot have the values
     * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    IEMOP_MNEMONIC(xop, "xop");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
    {
        /** @todo Test when exactly the XOP conformance checks kick in during
         * instruction decoding and fetching (using \#PF). */
        uint8_t bXop2;   IEM_OPCODE_GET_NEXT_U8(&bXop2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        /* XOP may not be combined with legacy size/rep/lock or REX prefixes. */
        if (   (  pVCpu->iem.s.fPrefixes
                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0)
        {
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
            if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* The R/X/B bits are stored inverted in the prefix, hence the '~'. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bXop2 & 0x3;

            /** @todo XOP: Just use new tables and decoders. */
            switch (bRm & 0x1f)
            {
                case 8: /* xop opcode map 8. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 9: /* xop opcode map 9. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 10: /* xop opcode map 10. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default:
                    Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        else
            Log(("XOP: Invalid prefix mix!\n"));
    }
    else
        Log(("XOP: XOP support disabled!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
6294
6295
6296/**
6297 * Common 'xchg reg,rAX' helper.
6298 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    /* Fold in REX.B to get the full register index. */
    iReg |= pVCpu->iem.s.uRexB;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg,         u16Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg,         u32Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg,         u64Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6346
6347
6348/**
6349 * @opcode 0x90
6350 */
FNIEMOP_DEF(iemOp_nop)
{
    /* R8/R8D and RAX/EAX can be exchanged. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
    {
        IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
        return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        IEMOP_MNEMONIC(pause, "pause");
        /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
           and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
        if (!IEM_IS_IN_GUEST(pVCpu))
        { /* probable */ }
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
        else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmx_pause);
#endif
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
        else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_svm_pause);
#endif
    }
    else
        IEMOP_MNEMONIC(nop, "nop");
    /** @todo testcase: lock nop; lock pause */
    /* NOP/PAUSE (outside the nested hw-virt cases above) only advances RIP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6384
6385
6386/**
6387 * @opcode 0x91
6388 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    /* Shared helper folds in REX.B and handles all operand sizes. */
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
6394
6395
6396/**
6397 * @opcode 0x92
6398 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    /* Shared helper folds in REX.B and handles all operand sizes. */
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
6404
6405
6406/**
6407 * @opcode 0x93
6408 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    /* Shared helper folds in REX.B and handles all operand sizes. */
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
6414
6415
6416/**
6417 * @opcode 0x94
6418 */
6419FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6420{
6421 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6422 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6423}
6424
6425
6426/**
6427 * @opcode 0x95
6428 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    /* Shared helper folds in REX.B and handles all operand sizes. */
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
6434
6435
6436/**
6437 * @opcode 0x96
6438 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    /* Shared helper folds in REX.B and handles all operand sizes. */
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
6444
6445
6446/**
6447 * @opcode 0x97
6448 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    /* Shared helper folds in REX.B and handles all operand sizes. */
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
6454
6455
6456/**
6457 * @opcode 0x98
6458 */
FNIEMOP_DEF(iemOp_cbw)
{
    /* CBW/CWDE/CDQE: sign-extend AL/AX/EAX into AX/EAX/RAX by testing the
       top bit of the source and ORing/ANDing the destination accordingly. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6505
6506
6507/**
6508 * @opcode 0x99
6509 */
FNIEMOP_DEF(iemOp_cwd)
{
    /* CWD/CDQ/CQO: fill DX/EDX/RDX with the sign bit of AX/EAX/RAX. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6556
6557
6558/**
6559 * @opcode 0x9a
6560 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT(); /* far call with immediate pointer is invalid in 64-bit mode */

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
                                iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
6579
6580
6581/** Opcode 0x9b. (aka fwait) */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* WAIT/FWAIT only checks for pending FPU conditions; no other effects. */
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6592
6593
6594/**
6595 * @opcode 0x9c
6596 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Deferred to C: modifies rSP; may cause a VM-exit in nested setups. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
                                iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
6605
6606
6607/**
6608 * @opcode 0x9d
6609 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_MNEMONIC(popf_Fv, "popf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Deferred to C: modifies rSP and RFLAGS; IF may change, so IRQs are
       (re)checked before and after the instruction. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
                                iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
6619
6620
6621/**
6622 * @opcode 0x9e
6623 * @opflmodify cf,pf,af,zf,sf
6624 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    /* In 64-bit mode SAHF is only valid when the CPU reports LAHF/SAHF support. */
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    /* AH (GREG index 4 without REX) supplies SF/ZF/AF/PF/CF; the reserved
       bit 1 (X86_EFL_1) is forced to one, the rest of EFLAGS is preserved. */
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6645
6646
6647/**
6648 * @opcode 0x9f
6649 * @opfltest cf,pf,af,zf,sf
6650 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    /* In 64-bit mode LAHF is only valid when the CPU reports LAHF/SAHF support. */
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Flags);
    /* Copies the low byte of EFLAGS into AH (GREG index 4 without REX). */
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6665
6666
/**
 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX part of the opcode.
 * The moffs width follows the effective address size (16/32/64 bits),
 * zero-extended to 64 bits.
 * Will return/throw on failures.
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } while (0)
6690
6691/**
6692 * @opcode 0xa0
6693 */
FNIEMOP_DEF(iemOp_mov_AL_Ob)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6715
6716
6717/**
6718 * @opcode 0xa1
6719 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Fetch rAX.  The load width follows the effective operand size.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6770
6771
6772/**
6773 * @opcode 0xa2
6774 */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
    IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6796
6797
6798/**
6799 * @opcode 0xa3
6800 */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Store rAX.  The store width follows the effective operand size.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6851
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Non-REP MOVS body: loads from DS(or override):rSI, stores to ES:rDI, then
 * steps both index registers by the element size in the direction given by
 * EFLAGS.DF.
 * @param ValBits    Element width in bits (8/16/32/64).
 * @param AddrBits   Effective address width in bits (16/32/64).
 * @param a_fMcFlags Flags for IEM_MC_BEGIN (minimum CPU target etc.). */
#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6871
6872/**
6873 * @opcode 0xa4
6874 * @opfltest df
6875 */
6876FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
6877{
6878 /*
6879 * Use the C implementation if a repeat prefix is encountered.
6880 */
6881 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6882 {
6883 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
6884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6885 switch (pVCpu->iem.s.enmEffAddrMode)
6886 {
6887 case IEMMODE_16BIT:
6888 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6889 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6890 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6891 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6892 iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
6893 case IEMMODE_32BIT:
6894 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6895 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6896 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6897 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6898 iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
6899 case IEMMODE_64BIT:
6900 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6901 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6902 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6903 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6904 iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
6905 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6906 }
6907 }
6908
6909 /*
6910 * Sharing case implementation with movs[wdq] below.
6911 */
6912 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
6913 switch (pVCpu->iem.s.enmEffAddrMode)
6914 {
6915 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
6916 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
6917 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
6918 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6919 }
6920}
6921
6922
6923/**
6924 * @opcode 0xa5
6925 * @opfltest df
6926 */
6927FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
6928{
6929
6930 /*
6931 * Use the C implementation if a repeat prefix is encountered.
6932 */
6933 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6934 {
6935 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
6936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6937 switch (pVCpu->iem.s.enmEffOpSize)
6938 {
6939 case IEMMODE_16BIT:
6940 switch (pVCpu->iem.s.enmEffAddrMode)
6941 {
6942 case IEMMODE_16BIT:
6943 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6944 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6945 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6946 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6947 iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
6948 case IEMMODE_32BIT:
6949 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6950 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6951 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6952 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6953 iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
6954 case IEMMODE_64BIT:
6955 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6956 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6957 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6958 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6959 iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
6960 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6961 }
6962 break;
6963 case IEMMODE_32BIT:
6964 switch (pVCpu->iem.s.enmEffAddrMode)
6965 {
6966 case IEMMODE_16BIT:
6967 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6968 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6969 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6970 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6971 iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
6972 case IEMMODE_32BIT:
6973 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6974 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6975 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6976 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6977 iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
6978 case IEMMODE_64BIT:
6979 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6980 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6981 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6982 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6983 iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
6984 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6985 }
6986 case IEMMODE_64BIT:
6987 switch (pVCpu->iem.s.enmEffAddrMode)
6988 {
6989 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
6990 case IEMMODE_32BIT:
6991 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6992 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6993 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6994 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6995 iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
6996 case IEMMODE_64BIT:
6997 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6998 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6999 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7000 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7001 iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
7002 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7003 }
7004 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7005 }
7006 }
7007
7008 /*
7009 * Annoying double switch here.
7010 * Using ugly macro for implementing the cases, sharing it with movsb.
7011 */
7012 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
7013 switch (pVCpu->iem.s.enmEffOpSize)
7014 {
7015 case IEMMODE_16BIT:
7016 switch (pVCpu->iem.s.enmEffAddrMode)
7017 {
7018 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7019 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7020 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
7021 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7022 }
7023 break;
7024
7025 case IEMMODE_32BIT:
7026 switch (pVCpu->iem.s.enmEffAddrMode)
7027 {
7028 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7029 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7030 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
7031 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7032 }
7033 break;
7034
7035 case IEMMODE_64BIT:
7036 switch (pVCpu->iem.s.enmEffAddrMode)
7037 {
7038 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7039 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
7040 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
7041 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7042 }
7043 break;
7044 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7045 }
7046}
7047
7048#undef IEM_MOVS_CASE
7049
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Expands to the non-REP cmps body for a given operand width (ValBits) and
 * address width (AddrBits): loads [iEffSeg:xSI] and [ES:xDI], runs the cmp
 * assembly helper on them to update EFLAGS (values are discarded), then
 * advances xSI/xDI by the operand size - or decrements them when EFLAGS.DF
 * is set.  a_fMcFlags feeds IEM_MC_BEGIN (CPU/mode restrictions).
 */
#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(3, 3, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        \
        IEM_MC_LOCAL(RTGCPTR, uAddr1); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr1, X86_GREG_xSI); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr1); \
        \
        IEM_MC_LOCAL(RTGCPTR, uAddr2); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr2, X86_GREG_xDI); \
        IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr2); \
        \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_ARG_LOCAL_REF(uint##ValBits##_t *, puValue1, uValue1, 0); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
7079
7080/**
7081 * @opcode 0xa6
7082 * @opflclass arithmetic
7083 * @opfltest df
7084 */
7085FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
7086{
7087
7088 /*
7089 * Use the C implementation if a repeat prefix is encountered.
7090 */
7091 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7092 {
7093 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
7094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7095 switch (pVCpu->iem.s.enmEffAddrMode)
7096 {
7097 case IEMMODE_16BIT:
7098 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7099 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7100 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7101 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7102 iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
7103 case IEMMODE_32BIT:
7104 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7105 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7106 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7107 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7108 iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
7109 case IEMMODE_64BIT:
7110 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7111 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7112 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7113 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7114 iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
7115 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7116 }
7117 }
7118 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7119 {
7120 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
7121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7122 switch (pVCpu->iem.s.enmEffAddrMode)
7123 {
7124 case IEMMODE_16BIT:
7125 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7126 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7127 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7128 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7129 iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
7130 case IEMMODE_32BIT:
7131 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7132 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7133 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7134 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7135 iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
7136 case IEMMODE_64BIT:
7137 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7138 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7139 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7140 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7141 iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
7142 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7143 }
7144 }
7145
7146 /*
7147 * Sharing case implementation with cmps[wdq] below.
7148 */
7149 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
7150 switch (pVCpu->iem.s.enmEffAddrMode)
7151 {
7152 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7153 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7154 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
7155 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7156 }
7157}
7158
7159
7160/**
7161 * @opcode 0xa7
7162 * @opflclass arithmetic
7163 * @opfltest df
7164 */
7165FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
7166{
7167 /*
7168 * Use the C implementation if a repeat prefix is encountered.
7169 */
7170 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7171 {
7172 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
7173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7174 switch (pVCpu->iem.s.enmEffOpSize)
7175 {
7176 case IEMMODE_16BIT:
7177 switch (pVCpu->iem.s.enmEffAddrMode)
7178 {
7179 case IEMMODE_16BIT:
7180 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7181 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7182 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7183 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7184 iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7185 case IEMMODE_32BIT:
7186 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7187 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7188 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7189 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7190 iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7191 case IEMMODE_64BIT:
7192 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7193 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7194 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7195 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7196 iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7197 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7198 }
7199 break;
7200 case IEMMODE_32BIT:
7201 switch (pVCpu->iem.s.enmEffAddrMode)
7202 {
7203 case IEMMODE_16BIT:
7204 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7205 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7206 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7207 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7208 iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7209 case IEMMODE_32BIT:
7210 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7211 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7212 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7213 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7214 iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7215 case IEMMODE_64BIT:
7216 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7217 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7218 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7219 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7220 iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7221 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7222 }
7223 case IEMMODE_64BIT:
7224 switch (pVCpu->iem.s.enmEffAddrMode)
7225 {
7226 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
7227 case IEMMODE_32BIT:
7228 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7229 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7230 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7231 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7232 iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7233 case IEMMODE_64BIT:
7234 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7235 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7236 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7237 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7238 iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7239 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7240 }
7241 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7242 }
7243 }
7244
7245 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7246 {
7247 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
7248 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7249 switch (pVCpu->iem.s.enmEffOpSize)
7250 {
7251 case IEMMODE_16BIT:
7252 switch (pVCpu->iem.s.enmEffAddrMode)
7253 {
7254 case IEMMODE_16BIT:
7255 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7256 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7257 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7258 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7259 iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7260 case IEMMODE_32BIT:
7261 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7262 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7263 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7264 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7265 iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7266 case IEMMODE_64BIT:
7267 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7268 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7269 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7270 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7271 iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7272 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7273 }
7274 break;
7275 case IEMMODE_32BIT:
7276 switch (pVCpu->iem.s.enmEffAddrMode)
7277 {
7278 case IEMMODE_16BIT:
7279 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7280 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7281 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7282 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7283 iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7284 case IEMMODE_32BIT:
7285 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7286 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7287 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7288 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7289 iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7290 case IEMMODE_64BIT:
7291 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7292 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7293 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7294 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7295 iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7296 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7297 }
7298 case IEMMODE_64BIT:
7299 switch (pVCpu->iem.s.enmEffAddrMode)
7300 {
7301 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
7302 case IEMMODE_32BIT:
7303 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7304 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7305 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7306 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7307 iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7308 case IEMMODE_64BIT:
7309 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7310 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7311 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7312 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7313 iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7314 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7315 }
7316 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7317 }
7318 }
7319
7320 /*
7321 * Annoying double switch here.
7322 * Using ugly macro for implementing the cases, sharing it with cmpsb.
7323 */
7324 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
7325 switch (pVCpu->iem.s.enmEffOpSize)
7326 {
7327 case IEMMODE_16BIT:
7328 switch (pVCpu->iem.s.enmEffAddrMode)
7329 {
7330 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7331 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7332 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
7333 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7334 }
7335 break;
7336
7337 case IEMMODE_32BIT:
7338 switch (pVCpu->iem.s.enmEffAddrMode)
7339 {
7340 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7341 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7342 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
7343 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7344 }
7345 break;
7346
7347 case IEMMODE_64BIT:
7348 switch (pVCpu->iem.s.enmEffAddrMode)
7349 {
7350 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7351 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_MIN_386); break;
7352 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
7353 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7354 }
7355 break;
7356 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7357 }
7358}
7359
7360#undef IEM_CMPS_CASE
7361
7362/**
7363 * @opcode 0xa8
7364 * @opflclass logical
7365 */
7366FNIEMOP_DEF(iemOp_test_AL_Ib)
7367{
7368 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
7369 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7370 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
7371}
7372
7373
7374/**
7375 * @opcode 0xa9
7376 * @opflclass logical
7377 */
7378FNIEMOP_DEF(iemOp_test_eAX_Iz)
7379{
7380 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
7381 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7382 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
7383}
7384
7385
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Expands to the non-REP stos body for a given operand width (ValBits) and
 * address width (AddrBits): stores AL/AX/EAX/RAX at [ES:xDI], then advances
 * xDI by the operand size - or decrements it when EFLAGS.DF is set.
 * a_fMcFlags feeds IEM_MC_BEGIN (CPU/mode restrictions).
 */
#define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr,  X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
7402
7403/**
7404 * @opcode 0xaa
7405 */
7406FNIEMOP_DEF(iemOp_stosb_Yb_AL)
7407{
7408 /*
7409 * Use the C implementation if a repeat prefix is encountered.
7410 */
7411 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7412 {
7413 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
7414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7415 switch (pVCpu->iem.s.enmEffAddrMode)
7416 {
7417 case IEMMODE_16BIT:
7418 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7419 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7420 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7421 iemCImpl_stos_al_m16);
7422 case IEMMODE_32BIT:
7423 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7424 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7425 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7426 iemCImpl_stos_al_m32);
7427 case IEMMODE_64BIT:
7428 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7429 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7430 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7431 iemCImpl_stos_al_m64);
7432 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7433 }
7434 }
7435
7436 /*
7437 * Sharing case implementation with stos[wdq] below.
7438 */
7439 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
7440 switch (pVCpu->iem.s.enmEffAddrMode)
7441 {
7442 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7443 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7444 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
7445 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7446 }
7447}
7448
7449
7450/**
7451 * @opcode 0xab
7452 */
7453FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
7454{
7455 /*
7456 * Use the C implementation if a repeat prefix is encountered.
7457 */
7458 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7459 {
7460 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
7461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7462 switch (pVCpu->iem.s.enmEffOpSize)
7463 {
7464 case IEMMODE_16BIT:
7465 switch (pVCpu->iem.s.enmEffAddrMode)
7466 {
7467 case IEMMODE_16BIT:
7468 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7469 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7470 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7471 iemCImpl_stos_ax_m16);
7472 case IEMMODE_32BIT:
7473 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7474 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7475 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7476 iemCImpl_stos_ax_m32);
7477 case IEMMODE_64BIT:
7478 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7479 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7480 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7481 iemCImpl_stos_ax_m64);
7482 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7483 }
7484 break;
7485 case IEMMODE_32BIT:
7486 switch (pVCpu->iem.s.enmEffAddrMode)
7487 {
7488 case IEMMODE_16BIT:
7489 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7490 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7491 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7492 iemCImpl_stos_eax_m16);
7493 case IEMMODE_32BIT:
7494 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7495 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7496 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7497 iemCImpl_stos_eax_m32);
7498 case IEMMODE_64BIT:
7499 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7500 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7501 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7502 iemCImpl_stos_eax_m64);
7503 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7504 }
7505 case IEMMODE_64BIT:
7506 switch (pVCpu->iem.s.enmEffAddrMode)
7507 {
7508 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
7509 case IEMMODE_32BIT:
7510 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7511 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7512 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7513 iemCImpl_stos_rax_m32);
7514 case IEMMODE_64BIT:
7515 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7516 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7517 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7518 iemCImpl_stos_rax_m64);
7519 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7520 }
7521 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7522 }
7523 }
7524
7525 /*
7526 * Annoying double switch here.
7527 * Using ugly macro for implementing the cases, sharing it with stosb.
7528 */
7529 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7530 switch (pVCpu->iem.s.enmEffOpSize)
7531 {
7532 case IEMMODE_16BIT:
7533 switch (pVCpu->iem.s.enmEffAddrMode)
7534 {
7535 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7536 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7537 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
7538 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7539 }
7540 break;
7541
7542 case IEMMODE_32BIT:
7543 switch (pVCpu->iem.s.enmEffAddrMode)
7544 {
7545 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7546 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7547 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
7548 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7549 }
7550 break;
7551
7552 case IEMMODE_64BIT:
7553 switch (pVCpu->iem.s.enmEffAddrMode)
7554 {
7555 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7556 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
7557 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
7558 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7559 }
7560 break;
7561 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7562 }
7563}
7564
7565#undef IEM_STOS_CASE
7566
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Expands to the non-REP lods body for a given operand width (ValBits) and
 * address width (AddrBits): loads from [iEffSeg:xSI] into AL/AX/EAX/RAX,
 * then advances xSI by the operand size - or decrements it when EFLAGS.DF
 * is set.  a_fMcFlags feeds IEM_MC_BEGIN (CPU/mode restrictions).
 */
#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
7583
7584/**
7585 * @opcode 0xac
7586 * @opfltest df
7587 */
7588FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
7589{
7590 /*
7591 * Use the C implementation if a repeat prefix is encountered.
7592 */
7593 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7594 {
7595 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
7596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7597 switch (pVCpu->iem.s.enmEffAddrMode)
7598 {
7599 case IEMMODE_16BIT:
7600 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7601 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7602 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7603 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7604 iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
7605 case IEMMODE_32BIT:
7606 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7607 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7608 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7609 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7610 iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
7611 case IEMMODE_64BIT:
7612 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7613 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7614 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7615 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7616 iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
7617 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7618 }
7619 }
7620
7621 /*
7622 * Sharing case implementation with stos[wdq] below.
7623 */
7624 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
7625 switch (pVCpu->iem.s.enmEffAddrMode)
7626 {
7627 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7628 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7629 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
7630 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7631 }
7632}
7633
7634
/**
 * @opcode 0xad
 * @opfltest df
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     *
     * The C helper runs the entire REP loop; the RT_BIT_64 mask tells the
     * native recompiler which guest GPRs the helper may dirty: rAX (the
     * loaded value), rSI (source index) and rCX (iteration counter).
     * Every case returns via IEM_MC_DEFER_TO_CIMPL_1_RET, so the 'break'
     * after the 16-bit inner switch is unreachable (kept for form only);
     * note the 32-bit inner switch has no such 'break', which is harmless
     * for the same reason.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    /* 64-bit mode + 67h yields 32-bit addressing, never 16-bit. */
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

#undef IEM_LODS_CASE
7760
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the non-repeating SCAS body: fetches a ValBits-wide value from
 * [ES:xDI] (AddrBits-wide addressing), compares it against rAX via
 * iemAImpl_cmp_uNN (only EFLAGS are modified, rAX is not written), and
 * finally advances or retreats xDI by ValBits/8 depending on EFLAGS.DF.
 * The ES segment is hardcoded here -- SCAS does not honour segment
 * override prefixes. */
#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(3, 2, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END();
7783
/**
 * @opcode 0xae
 * @opflclass arithmetic
 * @opfltest df
 */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     *
     * REPE and REPNE have distinct helpers (loop terminates on ZF=0 resp.
     * ZF=1).  The RT_BIT_64 mask declares rDI (index) and rCX (counter) as
     * modified for the native recompiler; AL is only read, and the EFLAGS
     * update is covered by IEM_CIMPL_F_STATUS_FLAGS.  All cases return, so
     * no 'break' statements are needed.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with scas[wdq] below (IEM_SCAS_CASE).
     */
    IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7855
7856
/**
 * @opcode 0xaf
 * @opflclass arithmetic
 * @opfltest df
 */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     *
     * Helper selection is (REPE/REPNE) x operand size x address size.  The
     * RT_BIT_64 mask declares rDI and rCX modified for the native
     * recompiler; rAX is only read and EFLAGS is covered by
     * IEM_CIMPL_F_STATUS_FLAGS.  Every case returns, so missing 'break'
     * statements after the inner switches are unreachable/harmless.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    /* See the VERR_IEM_IPE_6 @todo in the REPE variant above. */
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

#undef IEM_SCAS_CASE
8041
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the imm8 operand and stores it into the given byte register.
 * No flags are affected.
 *
 * @param   iFixedReg   The byte-register index encoded in the opcode,
 *                      already combined with REX.B by the caller.
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_STORE_GREG_U8_CONST(iFixedReg, u8Imm);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8054
8055
/**
 * @opcode 0xb0
 */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}


/**
 * @opcode 0xb1
 */
FNIEMOP_DEF(iemOp_CL_Ib) /* NOTE(review): name lacks the 'mov_' prefix its 0xb0/0xb4 siblings have. */
{
    IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}


/**
 * @opcode 0xb2
 */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}


/**
 * @opcode 0xb3
 */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}


/**
 * @opcode 0xb4
 */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
    /* Not a bug: byte-register index 4 encodes AH when no REX prefix is
       present (SPL/R12B with REX), hence X86_GREG_xSP here; likewise for
       the CH/DH/BH handlers below. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}


/**
 * @opcode 0xb5
 */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}


/**
 * @opcode 0xb6
 */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}


/**
 * @opcode 0xb7
 */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
8134
8135
/**
 * Common 'mov regX,immX' helper.
 *
 * Fetches an immediate of the current effective operand size and stores it
 * into the given general register.  The 64-bit case reads a full 64-bit
 * immediate (the only instruction form with one); the 32-bit store
 * implicitly zero-extends to 64 bits per the usual x86-64 rule.  No flags
 * are affected.
 *
 * @param   iFixedReg   The register index encoded in the opcode, already
 *                      combined with REX.B by the caller.
 */
FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U16_CONST(iFixedReg, u16Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U32_CONST(iFixedReg, u32Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64_CONST(iFixedReg, u64Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8172
8173
/**
 * @opcode 0xb8
 */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    /* Opcodes 0xb8..0xbf: mov reg,Iv -- thin wrappers selecting the register
       (with REX.B folded in) and deferring to iemOpCommonMov_Rv_Iv. */
    IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}


/**
 * @opcode 0xb9
 */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}


/**
 * @opcode 0xba
 */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}


/**
 * @opcode 0xbb
 */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}


/**
 * @opcode 0xbc
 */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}


/**
 * @opcode 0xbd
 */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}


/**
 * @opcode 0xbe
 */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}


/**
 * @opcode 0xbf
 */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
8252
8253
/**
 * @opcode 0xc0
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186(); /* 0xc0/0xc1 (shift/rotate by imm8) were introduced with the 80186. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Need to use a body macro here since the EFLAGS behaviour differs between
       the shifts, rotates and rotate w/ carry. Sigh. */
#define GRP2_BODY_Eb_Ib(a_pImplExpr) \
        PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
        if (IEM_IS_MODRM_REG_MODE(bRm)) \
        { \
            /* register */ \
            uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
            IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift,  1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* memory */ \
            /* The '1' = one more opcode byte (the imm8 shift count) still to
               be fetched after the effective address -- presumably needed for
               RIP-relative/length accounting; TODO confirm against
               IEM_MC_CALC_RM_EFF_ADDR's definition. */ \
            IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_186, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            \
            uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            \
            IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } (void)0

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /0
         * @opflclass rotate_count
         */
        case 0:
        {
            IEMOP_MNEMONIC2(MI, ROL, rol, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /1
         * @opflclass rotate_count
         */
        case 1:
        {
            IEMOP_MNEMONIC2(MI, ROR, ror, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /2
         * @opflclass rotate_carry_count
         */
        case 2:
        {
            IEMOP_MNEMONIC2(MI, RCL, rcl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /3
         * @opflclass rotate_carry_count
         */
        case 3:
        {
            IEMOP_MNEMONIC2(MI, RCR, rcr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /4
         * @opflclass shift_count
         */
        case 4:
        {
            IEMOP_MNEMONIC2(MI, SHL, shl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /5
         * @opflclass shift_count
         */
        case 5:
        {
            IEMOP_MNEMONIC2(MI, SHR, shr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /7
         * @opflclass shift_count
         */
        case 7:
        {
            IEMOP_MNEMONIC2(MI, SAR, sar, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
            break;
        }

        /** @opdone */
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is undefined in group 2 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
#undef GRP2_BODY_Eb_Ib
}
8406
8407
/* Need to use a body macro here since the EFLAGS behaviour differs between
   the shifts, rotates and rotate w/ carry. Sigh.

   Emits the full Ev,Ib group-2 body for the handlers below: reads the imm8
   shift count, dispatches on effective operand size (16/32/64) for both the
   register and memory (read-modify-write mapping) forms, and calls the
   size-specific worker from the IEMOPSHIFTSIZES table given by
   a_pImplExpr.  The 32-bit register form clears the high half of the
   destination GPR per the x86-64 rule.  Expects 'bRm' in scope. */
#define GRP2_BODY_Ev_Ib(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift,  1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift,  1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift,  1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 3, 0, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
8538
/**
 * @opmaps grp2_c1
 * @opcode /0
 * @opflclass rotate_count
 */
FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_Ib, uint8_t, bRm)
{
    /* Each grp2_c1 handler just binds its EFLAGS-behaviour-specific worker
       table into the shared GRP2_BODY_Ev_Ib macro above. */
    IEMOP_MNEMONIC2(MI, ROL, rol, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
}


/**
 * @opmaps grp2_c1
 * @opcode /1
 * @opflclass rotate_count
 */
FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, ROR, ror, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
}


/**
 * @opmaps grp2_c1
 * @opcode /2
 * @opflclass rotate_carry_count
 */
FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, RCL, rcl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
}


/**
 * @opmaps grp2_c1
 * @opcode /3
 * @opflclass rotate_carry_count
 */
FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, RCR, rcr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
}


/**
 * @opmaps grp2_c1
 * @opcode /4
 * @opflclass shift_count
 */
FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, SHL, shl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
}


/**
 * @opmaps grp2_c1
 * @opcode /5
 * @opflclass shift_count
 */
FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, SHR, shr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
}


/**
 * @opmaps grp2_c1
 * @opcode /7
 * @opflclass shift_count
 */
FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, SAR, sar, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
}

#undef GRP2_BODY_Ev_Ib
8623
/**
 * @opcode 0xc1
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    /* Group 2 dispatcher: /reg selects the operation; /6 is undefined. */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_Ib, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_Ib, bRm);
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
}
8645
8646
/**
 * @opcode 0xc2
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    /* Near return popping Iw extra bytes off the stack.  Deferred to a C
       helper per operand size; the mask declares rSP modified for the
       native recompiler, and IEM_CIMPL_F_BRANCH_* flag it as an indirect,
       stack-based branch.  Every case returns -- no breaks needed. */
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_16, u16Imm);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_32, u16Imm);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_64, u16Imm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8670
8671
/**
 * @opcode 0xc3
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    /* Plain near return (no extra stack adjustment, cf. 0xc2 retn Iw). */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_16);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_32);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_64);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8694
8695
/**
 * @opcode 0xc4
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
{
    /* The LES instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm) )
    {
        IEMOP_MNEMONIC(vex3_prefix, "vex3");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
               the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            if (IEM_IS_64BIT_CODE(pVCpu))
            {
#if 1
                AssertCompile(IEM_OP_PRF_SIZE_REX_W == RT_BIT_32(9));
                pVCpu->iem.s.fPrefixes |= (uint32_t)(bVex2 & 0x80) << (9 - 7);
                AssertCompile(IEM_OP_PRF_REX_B == RT_BIT_32(25) && IEM_OP_PRF_REX_X == RT_BIT_32(26) && IEM_OP_PRF_REX_R == RT_BIT_32(27));
                pVCpu->iem.s.fPrefixes |= (uint32_t)(~bRm & 0xe0) << (25 - 5);
#else
                /* NOTE(review): this disabled branch uses IEM_OP_PRF_SIZE_REX_B/X/R
                   whereas the live branch above maps the same bits onto
                   IEM_OP_PRF_REX_B/X/R - the SIZE_ names look like typos; verify
                   before ever re-enabling this path. */
                if (bVex2 & 0x80 /* VEX.W */)
                    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
                if (~bRm & 0x20 /* VEX.~B */)
                    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_B;
                if (~bRm & 0x40 /* VEX.~X */)
                    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_X;
                if (~bRm & 0x80 /* VEX.~R */)
                    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_R;
#endif
            }
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;

            /* The low five bits of the first payload byte select the opcode map. */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 2: /* 0x0f 0x38 lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 3: /* 0x0f 0x3a lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                default:
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        Log(("VEX3: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
8781
8782
/**
 * @opcode 0xc5
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
{
    /* The LDS instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
               the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            AssertCompile(IEM_OP_PRF_REX_R == RT_BIT_32(27));
            pVCpu->iem.s.fPrefixes |= (uint32_t)(~bRm & 0x80) << (27 - 7);
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bRm & 0x3;

            /* Two-byte VEX always implies opcode map 1 (0x0f). */
#ifdef IEM_WITH_VEX
            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
            IEMOP_BITCH_ABOUT_STUB();
            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
        }

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        Log(("VEX2: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
}
8828
8829
/**
 * @opcode 0xc6
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access - the effective address must be calculated before the
           immediate is fetched (1 = number of immediate bytes still to come). */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8863
8864
/**
 * @opcode 0xc7
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
                /* No 64-bit immediate form; Iz is a sign-extended 32-bit value. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access - the last CALC_RM_EFF_ADDR argument is the number of
           immediate bytes still to be fetched after the ModR/M bytes. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8952
8953
8954
8955
/**
 * @opcode 0xc8
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* cbFrame = local frame size; u8NestingLevel = display count. The C
       implementation does the frame setup; xSP and xBP are dirtied. */
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(0,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
                                iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
8972
8973
/**
 * @opcode 0xc9
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Tears down the stack frame set up by ENTER; modifies xSP and xBP. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(0,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
                                iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
8988
8989
/**
 * @opcode 0xca
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Far return releasing u16Imm extra stack bytes. As with iret, returning
       to an outer ring may sanitize/reload the data segment registers - hence
       DS/ES/FS/GS (sel, base, limit, attribs) are all in the flush list. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
9019
9020
/**
 * @opcode 0xcb
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Same as retf Iw with a zero stack adjustment; see 0xca for why the
       data segment registers are all in the flush list. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
9049
9050
/**
 * @opcode 0xcc
 */
FNIEMOP_DEF(iemOp_int3)
{
    IEMOP_MNEMONIC(int3, "int3");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Raises #BP; IEM_CIMPL_F_END_TB ends the translation block, so no
       register flush mask is needed (hence the 0). */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
}
9062
9063
/**
 * @opcode 0xcd
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    IEMOP_MNEMONIC(int_Ib, "int Ib");
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Software interrupt to an arbitrary vector; UINT64_MAX flushes all
       shadowed registers since anything may change across the gate. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS, UINT64_MAX,
                                iemCImpl_int, u8Int, IEMINT_INTN);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
9077
9078
/**
 * @opcode 0xce
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT(); /* INTO is invalid in 64-bit mode. */
    /* Conditionally raises #OF (only when EFLAGS.OF is set - hence
       IEM_CIMPL_F_BRANCH_CONDITIONAL). */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_BRANCH_CONDITIONAL | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                UINT64_MAX,
                                iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
9092
9093
/**
 * @opcode 0xcf
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
                                iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
    /* Segment registers are sanitized when returning to an outer ring, or fully
       reloaded when returning to v86 mode. Thus the large flush list above. */
}
9124
9125
/**
 * @opcode 0xd0
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Need to use a body macro here since the EFLAGS behaviour differs between
       the shifts, rotates and rotate w/ carry. Sigh. The macro emits both the
       register and memory forms for a byte operand shifted/rotated by 1. */
#define GRP2_BODY_Eb_1(a_pImplExpr) \
        PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
        if (IEM_IS_MODRM_REG_MODE(bRm)) \
        { \
            /* register */ \
            IEM_MC_BEGIN(3, 0, 0, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
            IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
            IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* memory */ \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,   pu8Dst,          0); \
            IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1, 1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } (void)0

    /* Dispatch on the ModR/M reg field; /6 is not assigned in group 2. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /0
         * @opflclass rotate_1
         */
        case 0:
        {
            IEMOP_MNEMONIC2(M1, ROL, rol, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /1
         * @opflclass rotate_1
         */
        case 1:
        {
            IEMOP_MNEMONIC2(M1, ROR, ror, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /2
         * @opflclass rotate_carry_1
         */
        case 2:
        {
            IEMOP_MNEMONIC2(M1, RCL, rcl, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /3
         * @opflclass rotate_carry_1
         */
        case 3:
        {
            IEMOP_MNEMONIC2(M1, RCR, rcr, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /4
         * @opflclass shift_1
         */
        case 4:
        {
            IEMOP_MNEMONIC2(M1, SHL, shl, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /5
         * @opflclass shift_1
         */
        case 5:
        {
            IEMOP_MNEMONIC2(M1, SHR, shr, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d0
         * @opcode /7
         * @opflclass shift_1
         */
        case 7:
        {
            IEMOP_MNEMONIC2(M1, SAR, sar, Eb, 1, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
            break;
        }
        /** @opdone */
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
#undef GRP2_BODY_Eb_1
}
9265
9266
/* Need to use a body macro here since the EFLAGS behaviour differs between
   the shifts, rotates and rotate w/ carry. Sigh. The macro emits register
   and memory forms for all three effective operand sizes, with a constant
   shift/rotate count of 1. Expects 'bRm' in scope at the expansion site. */
#define GRP2_BODY_Ev_1(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,           2); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,           2); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,           2); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 3, 0, 0); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
9387
/**
 * @opmaps grp2_d1
 * @opcode /0
 * @opflclass rotate_1
 */
FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_1, uint8_t, bRm)
{
    /* ROL Ev,1 - rotate left by one. */
    IEMOP_MNEMONIC2(M1, ROL, rol, Ev, 1, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
}
9398
9399
/**
 * @opmaps grp2_d1
 * @opcode /1
 * @opflclass rotate_1
 */
FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_1, uint8_t, bRm)
{
    /* ROR Ev,1 - rotate right by one. */
    IEMOP_MNEMONIC2(M1, ROR, ror, Ev, 1, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
}
9410
9411
/**
 * @opmaps grp2_d1
 * @opcode /2
 * @opflclass rotate_carry_1
 */
FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_1, uint8_t, bRm)
{
    /* RCL Ev,1 - rotate left through carry by one. */
    IEMOP_MNEMONIC2(M1, RCL, rcl, Ev, 1, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
}
9422
9423
/**
 * @opmaps grp2_d1
 * @opcode /3
 * @opflclass rotate_carry_1
 */
FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_1, uint8_t, bRm)
{
    /* RCR Ev,1 - rotate right through carry by one. */
    IEMOP_MNEMONIC2(M1, RCR, rcr, Ev, 1, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
}
9434
9435
/**
 * @opmaps grp2_d1
 * @opcode /4
 * @opflclass shift_1
 */
FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_1, uint8_t, bRm)
{
    /* SHL Ev,1 - logical left shift by one. */
    IEMOP_MNEMONIC2(M1, SHL, shl, Ev, 1, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
}
9446
9447
/**
 * @opmaps grp2_d1
 * @opcode /5
 * @opflclass shift_1
 */
FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_1, uint8_t, bRm)
{
    /* SHR Ev,1 - logical right shift by one. */
    IEMOP_MNEMONIC2(M1, SHR, shr, Ev, 1, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
}
9458
9459
/**
 * @opmaps grp2_d1
 * @opcode /7
 * @opflclass shift_1
 */
FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_1, uint8_t, bRm)
{
    /* SAR Ev,1 - arithmetic right shift by one. */
    IEMOP_MNEMONIC2(M1, SAR, sar, Ev, 1, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
}
9470
9471#undef GRP2_BODY_Ev_1
9472
9473/**
9474 * @opcode 0xd1
9475 */
9476FNIEMOP_DEF(iemOp_Grp2_Ev_1)
9477{
9478 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9479 switch (IEM_GET_MODRM_REG_8(bRm))
9480 {
9481 case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_1, bRm);
9482 case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_1, bRm);
9483 case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_1, bRm);
9484 case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_1, bRm);
9485 case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_1, bRm);
9486 case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_1, bRm);
9487 case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_1, bRm);
9488 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9489 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
9490 }
9491}
9492
9493
/**
 * @opcode 0xd2
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Need to use a body macro here since the EFLAGS behaviour differs between
       the shifts, rotates and rotate w/ carry. Sigh. The macro emits both the
       register and memory forms for a byte operand shifted/rotated by CL. */
#define GRP2_BODY_Eb_CL(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,   pu8Dst,    0); \
        IEM_MC_ARG(uint8_t,     cShiftArg, 1); \
        IEM_MC_ARG(uint32_t *,  pEFlags,   2); \
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory */ \
        IEM_MC_BEGIN(3, 3, 0, 0); \
        IEM_MC_ARG(uint8_t *,   pu8Dst,    0); \
        IEM_MC_ARG(uint8_t,     cShiftArg, 1); \
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_FETCH_EFLAGS(EFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        \
        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
        IEM_MC_COMMIT_EFLAGS(EFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    } (void)0

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        /**
         * @opdone
         * @opmaps grp2_d2
         * @opcode /0
         * @opflclass rotate_count
         */
        case 0:
        {
            IEMOP_MNEMONIC2EX(rol_Eb_CL, "rol Eb,CL", M_CL, ROL, rol, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d2
         * @opcode /1
         * @opflclass rotate_count
         */
        case 1:
        {
            IEMOP_MNEMONIC2EX(ror_Eb_CL, "ror Eb,CL", M_CL, ROR, ror, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d2
         * @opcode /2
         * @opflclass rotate_carry_count
         */
        case 2:
        {
            IEMOP_MNEMONIC2EX(rcl_Eb_CL, "rcl Eb,CL", M_CL, RCL, rcl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d2
         * @opcode /3
         * @opflclass rotate_carry_count
         */
        case 3:
        {
            IEMOP_MNEMONIC2EX(rcr_Eb_CL, "rcr Eb,CL", M_CL, RCR, rcr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d2
         * @opcode /4
         * @opflclass shift_count
         */
        case 4:
        {
            IEMOP_MNEMONIC2EX(shl_Eb_CL, "shl Eb,CL", M_CL, SHL, shl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d2
         * @opcode /5
         * @opflclass shift_count
         */
        case 5:
        {
            IEMOP_MNEMONIC2EX(shr_Eb_CL, "shr Eb,CL", M_CL, SHR, shr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_d2
         * @opcode /7
         * @opflclass shift_count
         */
        case 7:
        {
            IEMOP_MNEMONIC2EX(sar_Eb_CL, "sar Eb,CL", M_CL, SAR, sar, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
            GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
            break;
        }
        /** @opdone */
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
#undef GRP2_BODY_Eb_CL
}
9635
9636
/* Need to use a body macro here since the EFLAGS behaviour differs between
   the shifts, rotates and rotate w/ carry. Sigh. */
/*
 * Body for the group 2 "Ev,CL" shift/rotate instructions (opcode 0xd3 /0../7).
 *
 * Expects 'bRm' (ModR/M byte) and 'pVCpu' to be in scope.  Fetches the shift
 * count from CL, then dispatches on the effective operand size, handling both
 * the register-destination and memory-destination (read-modify-write) forms.
 *
 * a_pImplExpr: expression yielding the PCIEMOPSHIFTSIZES implementation
 *              table (16/32/64-bit worker functions) for the instruction.
 */
#define GRP2_BODY_Ev_CL(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); /* 32-bit writes zero bits 63:32 */ \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 3, 0, 0); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
9763
9764
9765/**
9766 * @opmaps grp2_d0
9767 * @opcode /0
9768 * @opflclass rotate_count
9769 */
9770FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_CL, uint8_t, bRm)
9771{
9772 IEMOP_MNEMONIC2EX(rol_Ev_CL, "rol Ev,CL", M_CL, ROL, rol, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9773 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9774}
9775
9776
9777/**
9778 * @opmaps grp2_d0
9779 * @opcode /1
9780 * @opflclass rotate_count
9781 */
9782FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_CL, uint8_t, bRm)
9783{
9784 IEMOP_MNEMONIC2EX(ror_Ev_CL, "ror Ev,CL", M_CL, ROR, ror, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9785 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9786}
9787
9788
9789/**
9790 * @opmaps grp2_d0
9791 * @opcode /2
9792 * @opflclass rotate_carry_count
9793 */
9794FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_CL, uint8_t, bRm)
9795{
9796 IEMOP_MNEMONIC2EX(rcl_Ev_CL, "rcl Ev,CL", M_CL, RCL, rcl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9797 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9798}
9799
9800
9801/**
9802 * @opmaps grp2_d0
9803 * @opcode /3
9804 * @opflclass rotate_carry_count
9805 */
9806FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_CL, uint8_t, bRm)
9807{
9808 IEMOP_MNEMONIC2EX(rcr_Ev_CL, "rcr Ev,CL", M_CL, RCR, rcr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9809 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9810}
9811
9812
9813/**
9814 * @opmaps grp2_d0
9815 * @opcode /4
9816 * @opflclass shift_count
9817 */
9818FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_CL, uint8_t, bRm)
9819{
9820 IEMOP_MNEMONIC2EX(shl_Ev_CL, "shl Ev,CL", M_CL, SHL, shl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9821 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9822}
9823
9824
9825/**
9826 * @opmaps grp2_d0
9827 * @opcode /5
9828 * @opflclass shift_count
9829 */
9830FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_CL, uint8_t, bRm)
9831{
9832 IEMOP_MNEMONIC2EX(shr_Ev_CL, "shr Ev,CL", M_CL, SHR, shr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9833 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
9834}
9835
9836
9837/**
9838 * @opmaps grp2_d0
9839 * @opcode /7
9840 * @opflclass shift_count
9841 */
9842FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_CL, uint8_t, bRm)
9843{
9844 IEMOP_MNEMONIC2EX(sar_Ev_CL, "sar Ev,CL", M_CL, SAR, sar, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9845 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
9846}
9847
9848#undef GRP2_BODY_Ev_CL
9849
9850/**
9851 * @opcode 0xd3
9852 */
9853FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
9854{
9855 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9856 switch (IEM_GET_MODRM_REG_8(bRm))
9857 {
9858 case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_CL, bRm);
9859 case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_CL, bRm);
9860 case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_CL, bRm);
9861 case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_CL, bRm);
9862 case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_CL, bRm);
9863 case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_CL, bRm);
9864 case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_CL, bRm);
9865 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9866 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
9867 }
9868}
9869
9870
9871/**
9872 * @opcode 0xd4
9873 * @opflmodify cf,pf,af,zf,sf,of
9874 * @opflundef cf,af,of
9875 */
9876FNIEMOP_DEF(iemOp_aam_Ib)
9877{
9878/** @todo testcase: aam */
9879 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
9880 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
9881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9882 IEMOP_HLP_NO_64BIT();
9883 if (!bImm)
9884 IEMOP_RAISE_DIVIDE_ERROR_RET();
9885 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aam, bImm);
9886}
9887
9888
9889/**
9890 * @opcode 0xd5
9891 * @opflmodify cf,pf,af,zf,sf,of
9892 * @opflundef cf,af,of
9893 */
9894FNIEMOP_DEF(iemOp_aad_Ib)
9895{
9896/** @todo testcase: aad? */
9897 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
9898 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
9899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9900 IEMOP_HLP_NO_64BIT();
9901 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aad, bImm);
9902}
9903
9904
9905/**
9906 * @opcode 0xd6
9907 */
9908FNIEMOP_DEF(iemOp_salc)
9909{
9910 IEMOP_MNEMONIC(salc, "salc");
9911 IEMOP_HLP_NO_64BIT();
9912
9913 IEM_MC_BEGIN(0, 0, 0, 0);
9914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9915 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
9916 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
9917 } IEM_MC_ELSE() {
9918 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
9919 } IEM_MC_ENDIF();
9920 IEM_MC_ADVANCE_RIP_AND_FINISH();
9921 IEM_MC_END();
9922}
9923
9924
9925/**
9926 * @opcode 0xd7
9927 */
9928FNIEMOP_DEF(iemOp_xlat)
9929{
9930 IEMOP_MNEMONIC(xlat, "xlat");
9931 switch (pVCpu->iem.s.enmEffAddrMode)
9932 {
9933 case IEMMODE_16BIT:
9934 IEM_MC_BEGIN(2, 0, 0, 0);
9935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9936 IEM_MC_LOCAL(uint8_t, u8Tmp);
9937 IEM_MC_LOCAL(uint16_t, u16Addr);
9938 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
9939 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
9940 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
9941 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9942 IEM_MC_ADVANCE_RIP_AND_FINISH();
9943 IEM_MC_END();
9944 break;
9945
9946 case IEMMODE_32BIT:
9947 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
9948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9949 IEM_MC_LOCAL(uint8_t, u8Tmp);
9950 IEM_MC_LOCAL(uint32_t, u32Addr);
9951 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
9952 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
9953 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
9954 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9955 IEM_MC_ADVANCE_RIP_AND_FINISH();
9956 IEM_MC_END();
9957 break;
9958
9959 case IEMMODE_64BIT:
9960 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
9961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9962 IEM_MC_LOCAL(uint8_t, u8Tmp);
9963 IEM_MC_LOCAL(uint64_t, u64Addr);
9964 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
9965 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
9966 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
9967 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9968 IEM_MC_ADVANCE_RIP_AND_FINISH();
9969 IEM_MC_END();
9970 break;
9971
9972 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9973 }
9974}
9975
9976
9977/**
9978 * Common worker for FPU instructions working on ST0 and STn, and storing the
9979 * result in ST0.
9980 *
9981 * @param bRm Mod R/M byte.
9982 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9983 */
9984FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
9985{
9986 IEM_MC_BEGIN(3, 1, 0, 0);
9987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9988 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9989 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9990 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9991 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9992
9993 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9994 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9995 IEM_MC_PREPARE_FPU_USAGE();
9996 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
9997 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
9998 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9999 } IEM_MC_ELSE() {
10000 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10001 } IEM_MC_ENDIF();
10002 IEM_MC_ADVANCE_RIP_AND_FINISH();
10003
10004 IEM_MC_END();
10005}
10006
10007
10008/**
10009 * Common worker for FPU instructions working on ST0 and STn, and only affecting
10010 * flags.
10011 *
10012 * @param bRm Mod R/M byte.
10013 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10014 */
10015FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
10016{
10017 IEM_MC_BEGIN(3, 1, 0, 0);
10018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10019 IEM_MC_LOCAL(uint16_t, u16Fsw);
10020 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10021 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10022 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10023
10024 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10025 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10026 IEM_MC_PREPARE_FPU_USAGE();
10027 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
10028 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
10029 IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
10030 } IEM_MC_ELSE() {
10031 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
10032 } IEM_MC_ENDIF();
10033 IEM_MC_ADVANCE_RIP_AND_FINISH();
10034
10035 IEM_MC_END();
10036}
10037
10038
10039/**
10040 * Common worker for FPU instructions working on ST0 and STn, only affecting
10041 * flags, and popping when done.
10042 *
10043 * @param bRm Mod R/M byte.
10044 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10045 */
10046FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
10047{
10048 IEM_MC_BEGIN(3, 1, 0, 0);
10049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10050 IEM_MC_LOCAL(uint16_t, u16Fsw);
10051 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10052 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10053 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10054
10055 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10056 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10057 IEM_MC_PREPARE_FPU_USAGE();
10058 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
10059 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
10060 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
10061 } IEM_MC_ELSE() {
10062 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
10063 } IEM_MC_ENDIF();
10064 IEM_MC_ADVANCE_RIP_AND_FINISH();
10065
10066 IEM_MC_END();
10067}
10068
10069
/** Opcode 0xd8 11/0.
 * fadd st0,stN: ST0 = ST0 + STn. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
10076
10077
/** Opcode 0xd8 11/1.
 * fmul st0,stN: ST0 = ST0 * STn. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
10084
10085
/** Opcode 0xd8 11/2.
 * fcom st0,stN: compare ST0 with STn, setting FSW condition codes only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
10092
10093
/** Opcode 0xd8 11/3.
 * fcomp st0,stN: compare ST0 with STn (same worker as fcom), then pop. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
10100
10101
/** Opcode 0xd8 11/4.
 * fsub st0,stN: ST0 = ST0 - STn. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
10108
10109
/** Opcode 0xd8 11/5.
 * fsubr st0,stN: reversed subtract, ST0 = STn - ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
10116
10117
/** Opcode 0xd8 11/6.
 * fdiv st0,stN: ST0 = ST0 / STn. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
10124
10125
/** Opcode 0xd8 11/7.
 * fdivr st0,stN: reversed divide, ST0 = STn / ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
10132
10133
10134/**
10135 * Common worker for FPU instructions working on ST0 and an m32r, and storing
10136 * the result in ST0.
10137 *
10138 * @param bRm Mod R/M byte.
10139 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10140 */
10141FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
10142{
10143 IEM_MC_BEGIN(3, 3, 0, 0);
10144 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10145 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10146 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
10147 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10148 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10149 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
10150
10151 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10153
10154 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10155 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10156 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10157
10158 IEM_MC_PREPARE_FPU_USAGE();
10159 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10160 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
10161 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10162 } IEM_MC_ELSE() {
10163 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10164 } IEM_MC_ENDIF();
10165 IEM_MC_ADVANCE_RIP_AND_FINISH();
10166
10167 IEM_MC_END();
10168}
10169
10170
/** Opcode 0xd8 !11/0.
 * fadd st0,m32r: ST0 = ST0 + the 32-bit real at the effective address. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
10177
10178
/** Opcode 0xd8 !11/1.
 * fmul st0,m32r: ST0 = ST0 * the 32-bit real at the effective address. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
10185
10186
/** Opcode 0xd8 !11/2.
 * fcom st0,m32r: compare ST0 with a 32-bit real from memory; only the FSW
 * is updated.  Stack underflow is raised if ST0 is empty. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10218
10219
/** Opcode 0xd8 !11/3.
 * fcomp st0,m32r: same as fcom st0,m32r but pops the stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10251
10252
/** Opcode 0xd8 !11/4.
 * fsub st0,m32r: ST0 = ST0 - the 32-bit real at the effective address. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
10259
10260
/** Opcode 0xd8 !11/5.
 * fsubr st0,m32r: reversed subtract, ST0 = m32r - ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
10267
10268
/** Opcode 0xd8 !11/6.
 * fdiv st0,m32r: ST0 = ST0 / the 32-bit real at the effective address. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
10275
10276
/** Opcode 0xd8 !11/7.
 * fdivr st0,m32r: reversed divide, ST0 = m32r / ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
10283
10284
10285/**
10286 * @opcode 0xd8
10287 */
10288FNIEMOP_DEF(iemOp_EscF0)
10289{
10290 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10291 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
10292
10293 if (IEM_IS_MODRM_REG_MODE(bRm))
10294 {
10295 switch (IEM_GET_MODRM_REG_8(bRm))
10296 {
10297 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
10298 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
10299 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
10300 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
10301 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
10302 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
10303 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
10304 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
10305 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10306 }
10307 }
10308 else
10309 {
10310 switch (IEM_GET_MODRM_REG_8(bRm))
10311 {
10312 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
10313 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
10314 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
10315 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
10316 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
10317 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
10318 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
10319 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
10320 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10321 }
10322 }
10323}
10324
10325
/** Opcode 0xd9 /0 mem32real
 * fld m32r: converts the 32-bit real at the effective address to 80-bit and
 * pushes it onto the FPU stack; push overflow is raised if there is no free
 * register.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) { /* ST7 must be free for the push not to overflow */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10356
10357
/** Opcode 0xd9 !11/2 mem32real
 * fst m32r: stores ST0 to memory as a 32-bit real.  If ST0 is empty and the
 * invalid-operation exception is masked (FCW.IM), a negative QNaN is written
 * instead; otherwise the store is rolled back and stack underflow is raised. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw); /* commit depends on resulting FSW */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() { /* #IA masked: write a negative QNaN */
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() { /* unmasked: discard the mapping, nothing is stored */
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10395
10396
/** Opcode 0xd9 !11/3
 * fstp m32r: like fst m32r, but pops the FPU stack after the store (or
 * after the underflow handling). */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw); /* commit depends on resulting FSW */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() { /* #IA masked: write a negative QNaN */
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() { /* unmasked: discard the mapping, nothing is stored */
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10434
10435
/** Opcode 0xd9 !11/4
 * fldenv m14/m28byte: loads the FPU environment from memory; deferred to
 * iemCImpl_fldenv with the effective operand size (14 vs 28 byte layout). */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw),
                        iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
10454
10455
10456/** Opcode 0xd9 !11/5 */
10457FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
10458{
10459 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
10460 IEM_MC_BEGIN(1, 1, 0, 0);
10461 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10462 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10463
10464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10465 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10466 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10467
10468 IEM_MC_ARG(uint16_t, u16Fsw, 0);
10469 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10470
10471 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw),
10472 iemCImpl_fldcw, u16Fsw);
10473 IEM_MC_END();
10474}
10475
10476
/** Opcode 0xd9 !11/6
 * fnstenv m14/m28byte: stores the FPU environment to memory; deferred to
 * iemCImpl_fnstenv with the effective operand size (14 vs 28 byte layout). */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
10495
10496
/** Opcode 0xd9 !11/7
 * fnstcw m2byte: stores the FPU control word (FCW) to memory. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
10513
10514
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP: performs no arithmetic, but still checks CR0.EM/TS and pending FPU
 * exceptions, and updates the FPU opcode/instruction pointer. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
10530
10531
/** Opcode 0xd9 11/0 stN - FLD ST(i): push a copy of ST(i) onto the FPU
 *  stack. An empty source register raises a stack underflow instead. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        /* Source register valid: push it with a zero FSW contribution. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10556
10557
/** Opcode 0xd9 11/3 stN - FXCH ST(i): exchange ST(0) and ST(i).
 *  If either register is empty the underflow path is delegated to a
 *  C implementation. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        /* Swap: ST(i) value (with C1 set) goes to ST(0), old ST(0) to ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10586
10587
/** Opcode 0xd9 11/4, 0xdd 11/2.
 *  FSTP ST(i): copy ST(0) to ST(i) and pop the register stack.
 *  The iDstReg == 0 case is special-cased since it amounts to a plain pop. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        /* Destination is ST(0) itself: just pop (or underflow-pop if empty). */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* General case: store ST(0) into ST(i), then pop. */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10634
10635
10636/**
10637 * Common worker for FPU instructions working on ST0 and replaces it with the
10638 * result, i.e. unary operators.
10639 *
10640 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10641 */
10642FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
10643{
10644 IEM_MC_BEGIN(2, 1, 0, 0);
10645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10646 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10647 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10648 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
10649
10650 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10651 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10652 IEM_MC_PREPARE_FPU_USAGE();
10653 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
10654 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
10655 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10656 } IEM_MC_ELSE() {
10657 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10658 } IEM_MC_ENDIF();
10659 IEM_MC_ADVANCE_RIP_AND_FINISH();
10660
10661 IEM_MC_END();
10662}
10663
10664
/** Opcode 0xd9 0xe0. FCHS - change the sign of ST(0) in place. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
10671
10672
/** Opcode 0xd9 0xe1. FABS - replace ST(0) with its absolute value. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
10679
10680
/** Opcode 0xd9 0xe4. FTST - compare ST(0) with 0.0, setting only the FSW
 *  condition codes (no register is written). */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX = no destination register to mark empty on underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10704
10705
/** Opcode 0xd9 0xe5. FXAM - classify the value in ST(0) into the FSW
 *  condition codes. Unlike most ST(0) ops there is no emptiness check:
 *  FXAM classifies empty registers too, so the register is referenced
 *  unconditionally. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10726
10727
10728/**
10729 * Common worker for FPU instructions pushing a constant onto the FPU stack.
10730 *
10731 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10732 */
10733FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
10734{
10735 IEM_MC_BEGIN(1, 1, 0, 0);
10736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10737 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10738 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10739
10740 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10741 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10742 IEM_MC_PREPARE_FPU_USAGE();
10743 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
10744 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
10745 IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
10746 } IEM_MC_ELSE() {
10747 IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
10748 } IEM_MC_ENDIF();
10749 IEM_MC_ADVANCE_RIP_AND_FINISH();
10750
10751 IEM_MC_END();
10752}
10753
10754
/** Opcode 0xd9 0xe8. FLD1 - push the constant +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
10761
10762
/** Opcode 0xd9 0xe9. FLDL2T - push the constant log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
10769
10770
/** Opcode 0xd9 0xea. FLDL2E - push the constant log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
10777
/** Opcode 0xd9 0xeb. FLDPI - push the constant pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
10784
10785
/** Opcode 0xd9 0xec. FLDLG2 - push the constant log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
10792
/** Opcode 0xd9 0xed. FLDLN2 - push the constant ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
10799
10800
/** Opcode 0xd9 0xee. FLDZ - push the constant +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
10807
10808
/** Opcode 0xd9 0xf0. F2XM1 - replace ST(0) with 2^ST(0) - 1.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs).  In addition is does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably usful in the implementation pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
10822
10823
10824/**
10825 * Common worker for FPU instructions working on STn and ST0, storing the result
10826 * in STn, and popping the stack unless IE, DE or ZE was raised.
10827 *
10828 * @param bRm Mod R/M byte.
10829 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10830 */
10831FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
10832{
10833 IEM_MC_BEGIN(3, 1, 0, 0);
10834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10835 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10836 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10837 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10838 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10839
10840 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10841 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10842
10843 IEM_MC_PREPARE_FPU_USAGE();
10844 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
10845 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
10846 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10847 } IEM_MC_ELSE() {
10848 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10849 } IEM_MC_ENDIF();
10850 IEM_MC_ADVANCE_RIP_AND_FINISH();
10851
10852 IEM_MC_END();
10853}
10854
10855
/** Opcode 0xd9 0xf1. FYL2X - ST(1) := ST(1) * log2(ST(0)); pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
10862
10863
10864/**
10865 * Common worker for FPU instructions working on ST0 and having two outputs, one
10866 * replacing ST0 and one pushed onto the stack.
10867 *
10868 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10869 */
10870FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
10871{
10872 IEM_MC_BEGIN(2, 1, 0, 0);
10873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10874 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
10875 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
10876 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
10877
10878 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10879 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10880 IEM_MC_PREPARE_FPU_USAGE();
10881 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
10882 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
10883 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
10884 } IEM_MC_ELSE() {
10885 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
10886 } IEM_MC_ENDIF();
10887 IEM_MC_ADVANCE_RIP_AND_FINISH();
10888
10889 IEM_MC_END();
10890}
10891
10892
/** Opcode 0xd9 0xf2. FPTAN - partial tangent of ST(0), pushes a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
10899
10900
/** Opcode 0xd9 0xf3. FPATAN - ST(1) := arctan(ST(1)/ST(0)); pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
10907
10908
/** Opcode 0xd9 0xf4. FXTRACT - split ST(0) into exponent and significand,
 *  replacing ST(0) and pushing the second result. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
10915
10916
/** Opcode 0xd9 0xf5. FPREM1 - IEEE partial remainder of ST(0)/ST(1). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
10923
10924
/** Opcode 0xd9 0xf6. FDECSTP - decrement the FPU top-of-stack pointer
 *  without changing any register content or tag. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10945
10946
/** Opcode 0xd9 0xf7. FINCSTP - increment the FPU top-of-stack pointer
 *  without changing any register content or tag. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10967
10968
/** Opcode 0xd9 0xf8. FPREM - partial remainder (truncating) of ST(0)/ST(1). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
10975
10976
/** Opcode 0xd9 0xf9. FYL2XP1 - ST(1) := ST(1) * log2(ST(0) + 1); pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
10983
10984
/** Opcode 0xd9 0xfa. FSQRT - replace ST(0) with its square root. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
10991
10992
/** Opcode 0xd9 0xfb. FSINCOS - sine in ST(0), cosine pushed on top. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
10999
11000
/** Opcode 0xd9 0xfc. FRNDINT - round ST(0) to integer per the FCW rounding mode. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
11007
11008
/** Opcode 0xd9 0xfd. FSCALE - scale ST(0) by 2^trunc(ST(1)). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
11015
11016
/** Opcode 0xd9 0xfe. FSIN - replace ST(0) with its sine. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
11023
11024
/** Opcode 0xd9 0xff. FCOS - replace ST(0) with its cosine. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
11031
11032
/** Used by iemOp_EscF1.
 *  Dispatch table for 0xd9 register-mode bytes 0xe0..0xff (i.e. modrm
 *  reg fields 4 thru 7); indexed by bRm - 0xe0. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
11069
11070
11071/**
11072 * @opcode 0xd9
11073 */
11074FNIEMOP_DEF(iemOp_EscF1)
11075{
11076 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11077 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
11078
11079 if (IEM_IS_MODRM_REG_MODE(bRm))
11080 {
11081 switch (IEM_GET_MODRM_REG_8(bRm))
11082 {
11083 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
11084 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
11085 case 2:
11086 if (bRm == 0xd0)
11087 return FNIEMOP_CALL(iemOp_fnop);
11088 IEMOP_RAISE_INVALID_OPCODE_RET();
11089 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
11090 case 4:
11091 case 5:
11092 case 6:
11093 case 7:
11094 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
11095 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
11096 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11097 }
11098 }
11099 else
11100 {
11101 switch (IEM_GET_MODRM_REG_8(bRm))
11102 {
11103 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
11104 case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
11105 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
11106 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
11107 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
11108 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
11109 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
11110 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
11111 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11112 }
11113 }
11114}
11115
11116
/** Opcode 0xda 11/0. FCMOVB - copy ST(i) to ST(0) if CF is set (below). */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty, regardless of the condition. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11141
11142
/** Opcode 0xda 11/1. FCMOVE - copy ST(i) to ST(0) if ZF is set (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty, regardless of the condition. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11167
11168
/** Opcode 0xda 11/2. FCMOVBE - copy ST(i) to ST(0) if CF or ZF is set
 *  (below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty, regardless of the condition. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11193
11194
/** Opcode 0xda 11/3. FCMOVU - copy ST(i) to ST(0) if PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty, regardless of the condition. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11219
11220
11221/**
11222 * Common worker for FPU instructions working on ST0 and ST1, only affecting
11223 * flags, and popping twice when done.
11224 *
11225 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11226 */
11227FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
11228{
11229 IEM_MC_BEGIN(3, 1, 0, 0);
11230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11231 IEM_MC_LOCAL(uint16_t, u16Fsw);
11232 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11233 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11234 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
11235
11236 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11237 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11238
11239 IEM_MC_PREPARE_FPU_USAGE();
11240 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
11241 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
11242 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
11243 } IEM_MC_ELSE() {
11244 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
11245 } IEM_MC_ENDIF();
11246 IEM_MC_ADVANCE_RIP_AND_FINISH();
11247
11248 IEM_MC_END();
11249}
11250
11251
/** Opcode 0xda 0xe9. FUCOMPP - unordered compare ST(0) with ST(1), pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
11258
11259
11260/**
11261 * Common worker for FPU instructions working on ST0 and an m32i, and storing
11262 * the result in ST0.
11263 *
11264 * @param bRm Mod R/M byte.
11265 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11266 */
11267FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
11268{
11269 IEM_MC_BEGIN(3, 3, 0, 0);
11270 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11271 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11272 IEM_MC_LOCAL(int32_t, i32Val2);
11273 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11274 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11275 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
11276
11277 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11278 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11279
11280 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11281 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11282 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11283
11284 IEM_MC_PREPARE_FPU_USAGE();
11285 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11286 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
11287 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
11288 } IEM_MC_ELSE() {
11289 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11290 } IEM_MC_ENDIF();
11291 IEM_MC_ADVANCE_RIP_AND_FINISH();
11292
11293 IEM_MC_END();
11294}
11295
11296
/** Opcode 0xda !11/0. FIADD m32int - ST(0) := ST(0) + m32i. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
11303
11304
/** Opcode 0xda !11/1. FIMUL m32int - ST(0) := ST(0) * m32i. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
11311
11312
/** Opcode 0xda !11/2. FICOM m32int - compare ST(0) with m32i; only FSW
 *  condition codes are affected, nothing is stored or popped. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX = no destination register to mark empty on underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11344
11345
/** Opcode 0xda !11/3. FICOMP m32int - like FICOM m32i but pops the stack
 *  afterwards. Shares the comparison helper with FICOM. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX = no destination register to mark empty on underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11377
11378
/** Opcode 0xda !11/4. FISUB m32int - ST(0) := ST(0) - m32i. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
11385
11386
/** Opcode 0xda !11/5. FISUBR m32int - ST(0) := m32i - ST(0) (reversed). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
11393
11394
/** Opcode 0xda !11/6. FIDIV m32int - ST(0) := ST(0) / m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
11401
11402
/** Opcode 0xda !11/7. FIDIVR m32int - ST(0) := m32i / ST(0) (reversed). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
11409
11410
11411/**
11412 * @opcode 0xda
11413 */
11414FNIEMOP_DEF(iemOp_EscF2)
11415{
11416 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11417 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
11418 if (IEM_IS_MODRM_REG_MODE(bRm))
11419 {
11420 switch (IEM_GET_MODRM_REG_8(bRm))
11421 {
11422 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
11423 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
11424 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
11425 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
11426 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
11427 case 5:
11428 if (bRm == 0xe9)
11429 return FNIEMOP_CALL(iemOp_fucompp);
11430 IEMOP_RAISE_INVALID_OPCODE_RET();
11431 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
11432 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11433 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11434 }
11435 }
11436 else
11437 {
11438 switch (IEM_GET_MODRM_REG_8(bRm))
11439 {
11440 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
11441 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
11442 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
11443 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
11444 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
11445 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
11446 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
11447 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
11448 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11449 }
11450 }
11451}
11452
11453
/** Opcode 0xdb !11/0. FILD m32int - convert a signed 32-bit integer memory
 *  operand to R80 and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) relative to top must be empty for the push to succeed. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11484
11485
/** Opcode 0xdb !11/1. FISTTP m32int (SSE3) - store ST(0) to memory as a
 *  truncated signed 32-bit integer, then pop. The destination is mapped
 *  for write up front; on stack underflow the integer-indefinite value is
 *  stored if the invalid-operation exception is masked, otherwise the
 *  mapping is rolled back. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit only if the helper's FSW allows it (exception handling). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11523
11524
/** Opcode 0xdb !11/2.
 * FIST m32i: store ST(0) to memory as a 32-bit signed integer (rounded per
 * FCW.RC); the stack is not popped. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map destination write-only before touching FPU state. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): store integer indefinite if #IA is masked, else roll
           the mapping back and signal stack underflow. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11562
11563
11564/** Opcode 0xdb !11/3. */
11565FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
11566{
11567 IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
11568 IEM_MC_BEGIN(3, 2, 0, 0);
11569 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11570 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11571
11572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11573 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11574 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11575 IEM_MC_PREPARE_FPU_USAGE();
11576
11577 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11578 IEM_MC_ARG(int32_t *, pi32Dst, 1);
11579 IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11580
11581 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
11582 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
11583 IEM_MC_LOCAL(uint16_t, u16Fsw);
11584 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
11585 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
11586 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
11587 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11588 } IEM_MC_ELSE() {
11589 IEM_MC_IF_FCW_IM() {
11590 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
11591 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
11592 } IEM_MC_ELSE() {
11593 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
11594 } IEM_MC_ENDIF();
11595 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11596 } IEM_MC_ENDIF();
11597 IEM_MC_ADVANCE_RIP_AND_FINISH();
11598
11599 IEM_MC_END();
11600}
11601
11602
/** Opcode 0xdb !11/5.
 * FLD m80r: push an 80-bit extended-precision real from memory onto the FPU
 * stack (no conversion needed). */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Push target is ST(7); occupied means stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11633
11634
/** Opcode 0xdb !11/7.
 * FSTP m80r: store ST(0) to memory as an 80-bit extended real and pop the
 * stack. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the 80-bit destination write-only before touching FPU state. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_MEM_MAP_R80_WO(pr80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): store the real indefinite QNaN if #IA is masked,
           otherwise roll back and signal stack underflow. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11672
11673
/** Opcode 0xdb 11/0.
 * FCMOVNB ST0,ST(i): copy ST(i) to ST(0) if CF is clear (not below). */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be occupied; the EFLAGS test only gates the
       actual copy, FOP/FIP are updated either way. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11698
11699
/** Opcode 0xdb 11/1.
 * FCMOVNE ST0,ST(i): copy ST(i) to ST(0) if ZF is clear (not equal). */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both registers must be occupied; the flag test gates only the copy. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11724
11725
/** Opcode 0xdb 11/2.
 * FCMOVNBE ST0,ST(i): copy ST(i) to ST(0) if both CF and ZF are clear
 * (not below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both registers must be occupied; the flag test gates only the copy. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11750
11751
/** Opcode 0xdb 11/3.
 * FCMOVNU ST0,ST(i): copy ST(i) to ST(0) if PF is clear (not unordered).
 * NOTE(review): the local identifier says "fcmovnnu" (double n); the
 * PF-clear condition matches Intel's FCMOVNU — presumably just a naming
 * quirk, verify against the mnemonic tables. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both registers must be occupied; the flag test gates only the copy. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11776
11777
/** Opcode 0xdb 0xe0.
 * FNENI: 8087 interrupt-enable instruction; a no-op (ignored) on later FPUs,
 * so only the DNA check is performed before advancing RIP. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11788
11789
/** Opcode 0xdb 0xe1.
 * FNDISI: 8087 interrupt-disable instruction; a no-op (ignored) on later
 * FPUs, so only the DNA check is performed before advancing RIP. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11800
11801
/** Opcode 0xdb 0xe2.
 * FNCLEX: clear the FSW exception bits without checking for pending
 * unmasked exceptions first (no preceding wait). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11814
11815
/** Opcode 0xdb 0xe3.
 * FNINIT: reinitialize the FPU; deferred to the shared C implementation of
 * FINIT/FNINIT with exception checking disabled (the no-wait form). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                                iemCImpl_finit, false /*fCheckXcpts*/);
}
11824
11825
/** Opcode 0xdb 0xe4.
 * FNSETPM: 80287 "set protected mode"; ignored (no-op) on later FPUs, only
 * the DNA check is performed. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11836
11837
/** Opcode 0xdb 0xe5.
 * FRSTPM: 80287XL "reset protected mode"; raises \#UD here since newer CPUs
 * treat the encoding as invalid (the ignore-as-no-op path is compiled out). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
11853
11854
11855/** Opcode 0xdb 11/5. */
11856FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
11857{
11858 IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
11859 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
11860 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
11861 0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
11862}
11863
11864
/** Opcode 0xdb 11/6.
 * FCOMI ST0,ST(i): ordered compare of ST(0) with ST(i), setting ZF/PF/CF in
 * EFLAGS; shares the C implementation with FUCOMI (fUCmp=false), and the
 * fPop bit OR'ed into the FOP argument is clear (no pop). */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
11873
11874
/**
 * @opcode 0xdb
 *
 * Escape opcode 0xdb dispatcher.  Register-mode ModRM selects the
 * FCMOVcc/FUCOMI/FCOMI group, with reg=4 further decoding the administrative
 * encodings 0xe0-0xe7 (FNENI/FNDISI/FNCLEX/FNINIT/FNSETPM/FRSTPM); memory
 * mode selects the 32-bit integer load/stores and the 80-bit real load/store.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the x87 FOP value: ModRM in the low byte, the low three opcode
       bits (0xdb -> 3) in the high byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
                    case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached: every inner case returns */
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11926
11927
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * Both operands must be occupied; otherwise stack underflow is signalled on
 * the destination register instead of calling the assembly worker.
 *
 * @param bRm Mod R/M byte (the R/M field selects STn).
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Operand 1 is ST(i), operand 2 is ST(0); the result goes back to ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11958
11959
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST0: ST(i) += ST(0), via the common STn/ST0 worker. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
11966
11967
/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST0: ST(i) *= ST(0), via the common STn/ST0 worker. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
11974
11975
/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST0.  Note that on the 0xdc escape the /4 and /5 sub/subr
 * encodings are swapped relative to 0xd8. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
11982
11983
/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST0.  Note that on the 0xdc escape the /4 and /5 sub/subr
 * encodings are swapped relative to 0xd8. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
11990
11991
/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST0 (reversed-operand divide, same /6-/7 swap vs 0xd8 as the
 * sub forms). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
11998
11999
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST0, via the common STn/ST0 worker. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
12006
12007
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * Fetches the m64 operand, then calls the assembly worker if ST(0) is
 * occupied; otherwise signals stack underflow against ST(0) with the memory
 * operand recorded for FDP/FDS.
 *
 * @param bRm Mod R/M byte (memory form).
 * @param pfnImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12042
12043
/** Opcode 0xdc !11/0.
 * FADD m64r: ST(0) += m64 real, via the common ST0/m64 worker. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
12050
12051
/** Opcode 0xdc !11/1.
 * FMUL m64r: ST(0) *= m64 real, via the common ST0/m64 worker. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
12058
12059
/** Opcode 0xdc !11/2.
 * FCOM m64r: compare ST(0) with an m64 real, updating only the FSW
 * condition codes (no stack change, no pop). */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Only the FSW is produced; empty ST(0) signals stack underflow with the
       condition codes updated by the underflow helper. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12091
12092
/** Opcode 0xdc !11/3.
 * FCOMP m64r: compare ST(0) with an m64 real, updating the FSW condition
 * codes, then pop the stack (same assembly worker as FCOM). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The pop happens in both branches (THEN_POP variants). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12124
12125
/** Opcode 0xdc !11/4.
 * FSUB m64r: ST(0) -= m64 real, via the common ST0/m64 worker. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
12132
12133
/** Opcode 0xdc !11/5.
 * FSUBR m64r: ST(0) = m64 real - ST(0), via the common ST0/m64 worker. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
12140
12141
/** Opcode 0xdc !11/6.
 * FDIV m64r: ST(0) /= m64 real, via the common ST0/m64 worker. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
12148
12149
/** Opcode 0xdc !11/7.
 * FDIVR m64r: ST(0) = m64 real / ST(0), via the common ST0/m64 worker. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
12156
12157
/**
 * @opcode 0xdc
 *
 * Escape opcode 0xdc dispatcher.  Register mode gives the arithmetic forms
 * targeting ST(i) (result in ST(i)); memory mode gives the same operations
 * against a 64-bit real memory operand with the result in ST(0).
 */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the x87 FOP value: ModRM in the low byte, the low three opcode
       bits (0xdc -> 4) in the high byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12196
12197
/** Opcode 0xdd !11/0.
 * FLD m64r: convert a 64-bit real from memory to 80-bit and push it onto the
 * FPU stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Push target is ST(7); occupied means stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12228
12229
/** Opcode 0xdd !11/1 (was mislabelled !11/0 — FISTTP m64i is /1).
 * FISTTP m64i (SSE3): store ST(0) as a 64-bit signed integer using truncation
 * regardless of FCW.RC, then pop the stack. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map destination write-only before touching FPU state. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): store integer indefinite if #IA is masked, else roll
           back and signal stack underflow. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12267
12268
/** Opcode 0xdd !11/2 (was mislabelled !11/0 — FST m64r is /2).
 * FST m64r: store ST(0) to memory as a 64-bit real; no pop. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map destination write-only before touching FPU state. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): store the real indefinite QNaN if #IA is masked,
           else roll back and signal stack underflow. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12306
12307
12308
12309
/** Opcode 0xdd !11/3 (was mislabelled !11/0 — FSTP m64r is /3).
 * FSTP m64r: store ST(0) to memory as a 64-bit real and pop the stack. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map destination write-only before touching FPU state. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): store the real indefinite QNaN if #IA is masked,
           else roll back and signal stack underflow; popped either way. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12347
12348
/** Opcode 0xdd !11/4 (was mislabelled !11/0 — FRSTOR is /4).
 * FRSTOR m94/108byte: restore the full FPU state from memory; deferred to
 * the C implementation (operand size selects the 16/32-bit layout). */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
12367
12368
/** Opcode 0xdd !11/6. (Memory-mode dispatch case 6 in iemOp_EscF5 below.)
 * fnsave: store the full FPU state to memory, then implicitly reinitialize
 * the FPU (like fninit); deferred to a C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
12387
/** Opcode 0xdd !11/7. (Memory-mode dispatch case 7 in iemOp_EscF5 below.)
 * fnstsw m16: store the FPU status word to a 16-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2, 0, 0);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    /* Read-only access; no pending FPU exception check for the fnstsw form. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
12411
12412
/** Opcode 0xdd 11/0.
 * ffree: tag ST(i) as empty without touching its contents or the top. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
       unmodified. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12432
12433
/** Opcode 0xdd 11/2. (Register-mode dispatch case 2 in iemOp_EscF5 below.)
 * fst: copy ST(0) into ST(i) without popping. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Straight register-to-register copy; FSW bits untouched (0). */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12456
12457
/** Opcode 0xdd 11/4. (Register-mode dispatch case 4 in iemOp_EscF5 below.)
 * Unordered compare of ST(0) with ST(i); no result stored, no pop. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
12464
12465
/** Opcode 0xdd 11/5. (Register-mode dispatch case 5 in iemOp_EscF5 below.)
 * Unordered compare of ST(0) with ST(i), then pop the stack once. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
12472
12473
/**
 * @opcode 0xdd
 * Escape byte 0xdd decoder: splits on ModR/M register vs. memory mode and
 * dispatches on the reg field (bits 5:3).
 */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 escape bits + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12512
12513
/** Opcode 0xde 11/0.
 * ST(i) += ST(0), then pop — via the common two-operand-and-pop worker. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
12520
12521
/** Opcode 0xde 11/1. (Register-mode dispatch case 1 in iemOp_EscF6 below.)
 * ST(i) *= ST(0), then pop — via the common two-operand-and-pop worker. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
12528
12529
/** Opcode 0xde 0xd9.
 * Compare ST(0) with ST(1), then pop the stack twice (no result stored). */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
12536
12537
/** Opcode 0xde 11/4.
 * ST(i) = ST(0) - ST(i) (reverse subtract), then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
12544
12545
/** Opcode 0xde 11/5.
 * ST(i) -= ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
12552
12553
/** Opcode 0xde 11/6.
 * ST(i) = ST(0) / ST(i) (reverse divide), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
12560
12561
/** Opcode 0xde 11/7.
 * ST(i) /= ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
12568
12569
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * On an empty ST(0) the stack-underflow path is taken and the arithmetic
 * implementation is not called.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the 16-bit integer operand before touching FPU state. */
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12605
12606
/** Opcode 0xde !11/0.
 * ST(0) += (int16 memory operand), via the common st0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
12613
12614
/** Opcode 0xde !11/1.
 * ST(0) *= (int16 memory operand), via the common st0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
12621
12622
/** Opcode 0xde !11/2.
 * ficom: compare ST(0) with an int16 memory operand; only FSW is updated,
 * nothing is stored and the stack is not popped. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12654
12655
/** Opcode 0xde !11/3.
 * ficomp: same as ficom (compare ST(0) with int16 in memory, FSW only) but
 * pops the register stack afterwards — note the *_THEN_POP variants. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12687
12688
/** Opcode 0xde !11/4.
 * ST(0) -= (int16 memory operand), via the common st0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
12695
12696
/** Opcode 0xde !11/5.
 * ST(0) = (int16 memory operand) - ST(0), via the common st0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
12703
12704
/** Opcode 0xde !11/6.
 * ST(0) /= (int16 memory operand), via the common st0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
12711
12712
/** Opcode 0xde !11/7.
 * ST(0) = (int16 memory operand) / ST(0), via the common st0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
12719
12720
/**
 * @opcode 0xde
 * Escape byte 0xde decoder: register mode holds the *p (pop) arithmetic
 * forms, memory mode the 16-bit-integer forms.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 escape bits + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            /* Only 0xde 0xd9 (fcompp) is defined in /3; the rest is invalid. */
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12761
12762
/** Opcode 0xdf 11/0.
 * Undocument instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    /* The "pop": increment TOP after freeing the register. */
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12782
12783
/** Opcode 0xdf 0xe0.
 * fnstsw ax: copy the FPU status word into AX. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12798
12799
12800/** Opcode 0xdf 11/5. */
12801FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
12802{
12803 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
12804 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
12805 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
12806 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
12807}
12808
12809
/** Opcode 0xdf 11/6.
 * fcomip: ordered compare ST(0) with ST(i) into EFLAGS, then pop.
 * fUCmp is false: the ordered form signals #IA on QNaN operands. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
12818
12819
/** Opcode 0xdf !11/0.
 * fild m16i: convert a 16-bit signed integer from memory to 80-bit real and
 * push it; pushing onto a full stack takes the push-overflow path. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (TOP-1 mod 8) is where the push will land; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12850
12851
/** Opcode 0xdf !11/1.
 * fisttp m16i (SSE3): store ST(0) to memory as int16 with truncation, then
 * pop. Empty ST(0) with IM masked stores INT16_MIN (integer indefinite). */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        /* fistt = truncating conversion, regardless of FCW.RC. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12889
12890
/** Opcode 0xdf !11/2.
 * fist m16i: store ST(0) to memory as int16 (rounded per FCW.RC), no pop. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        /* No pop here — that is the fistp (!11/3) variant. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12928
12929
/** Opcode 0xdf !11/3.
 * fistp m16i: store ST(0) to memory as int16 (rounded per FCW.RC), then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): store integer indefinite if IM is masked, else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12967
12968
/** Opcode 0xdf !11/4.
 * fbld: load an 80-bit packed BCD value from memory, convert to 80-bit real
 * and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (the push destination) must be empty or we overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12999
13000
/** Opcode 0xdf !11/5.
 * fild m64i: convert a 64-bit signed integer from memory to 80-bit real and
 * push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (the push destination) must be empty or we overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13031
13032
/** Opcode 0xdf !11/6.
 * fbstp: store ST(0) to memory as 80-bit packed BCD, then pop. Empty ST(0)
 * with IM masked stores the BCD indefinite encoding instead. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_MEM_MAP_D80_WO(pd80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13070
13071
/** Opcode 0xdf !11/7.
 * fistp m64i: store ST(0) to memory as int64 (rounded per FCW.RC), then pop.
 * Empty ST(0) with IM masked stores INT64_MIN (integer indefinite). */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13109
13110
/**
 * @opcode 0xdf
 * Escape byte 0xdf decoder: register mode holds ffreep and the EFLAGS
 * compare-and-pop forms, memory mode the 16/64-bit integer and BCD forms.
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 escape bits + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            /* Only 0xdf 0xe0 (fnstsw ax) is defined in /4. */
            case 4: if (bRm == 0xe0)
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i,   bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i,  bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i,   bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13151
13152
/**
 * @opcode 0xe0
 * @opfltest zf
 * LOOPNE/LOOPNZ: decrement (r/e)CX; branch when the counter did not start at
 * one and ZF is clear. The counter is decremented on both paths; the
 * "not one" test is the pre-decrement equivalent of "non-zero afterwards".
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The effective *address* size selects CX vs ECX vs RCX as the counter. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13207
13208
/**
 * @opcode 0xe1
 * @opfltest zf
 * LOOPE/LOOPZ: decrement (r/e)CX; branch when the counter did not start at
 * one and ZF is set (the only difference from LOOPNE is the ZF polarity).
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The effective *address* size selects CX vs ECX vs RCX as the counter. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13263
13264
13265/**
13266 * @opcode 0xe2
13267 */
13268FNIEMOP_DEF(iemOp_loop_Jb)
13269{
13270 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
13271 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13272 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13273
13274 /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
13275 * using the 32-bit operand size override. How can that be restarted? See
13276 * weird pseudo code in intel manual. */
13277
13278 /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
13279 * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
13280 * the loop causes guest crashes, but when logging it's nice to skip a few million
13281 * lines of useless output. */
13282#if defined(LOG_ENABLED)
13283 if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
13284 switch (pVCpu->iem.s.enmEffAddrMode)
13285 {
13286 case IEMMODE_16BIT:
13287 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
13288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13289 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
13290 IEM_MC_ADVANCE_RIP_AND_FINISH();
13291 IEM_MC_END();
13292 break;
13293
13294 case IEMMODE_32BIT:
13295 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13297 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
13298 IEM_MC_ADVANCE_RIP_AND_FINISH();
13299 IEM_MC_END();
13300 break;
13301
13302 case IEMMODE_64BIT:
13303 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
13304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13305 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
13306 IEM_MC_ADVANCE_RIP_AND_FINISH();
13307 IEM_MC_END();
13308 break;
13309
13310 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13311 }
13312#endif
13313
13314 switch (pVCpu->iem.s.enmEffAddrMode)
13315 {
13316 case IEMMODE_16BIT:
13317 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
13318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13319 IEM_MC_IF_CX_IS_NOT_ONE() {
13320 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13321 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13322 } IEM_MC_ELSE() {
13323 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
13324 IEM_MC_ADVANCE_RIP_AND_FINISH();
13325 } IEM_MC_ENDIF();
13326 IEM_MC_END();
13327 break;
13328
13329 case IEMMODE_32BIT:
13330 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13331 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13332 IEM_MC_IF_ECX_IS_NOT_ONE() {
13333 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13334 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13335 } IEM_MC_ELSE() {
13336 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
13337 IEM_MC_ADVANCE_RIP_AND_FINISH();
13338 } IEM_MC_ENDIF();
13339 IEM_MC_END();
13340 break;
13341
13342 case IEMMODE_64BIT:
13343 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
13344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13345 IEM_MC_IF_RCX_IS_NOT_ONE() {
13346 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13347 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13348 } IEM_MC_ELSE() {
13349 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
13350 IEM_MC_ADVANCE_RIP_AND_FINISH();
13351 } IEM_MC_ENDIF();
13352 IEM_MC_END();
13353 break;
13354
13355 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13356 }
13357}
13358
13359
13360/**
13361 * @opcode 0xe3
13362 */
13363FNIEMOP_DEF(iemOp_jecxz_Jb)
13364{
13365 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
13366 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13367 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13368
13369 switch (pVCpu->iem.s.enmEffAddrMode)
13370 {
13371 case IEMMODE_16BIT:
13372 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
13373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13374 IEM_MC_IF_CX_IS_NZ() {
13375 IEM_MC_ADVANCE_RIP_AND_FINISH();
13376 } IEM_MC_ELSE() {
13377 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13378 } IEM_MC_ENDIF();
13379 IEM_MC_END();
13380 break;
13381
13382 case IEMMODE_32BIT:
13383 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13385 IEM_MC_IF_ECX_IS_NZ() {
13386 IEM_MC_ADVANCE_RIP_AND_FINISH();
13387 } IEM_MC_ELSE() {
13388 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13389 } IEM_MC_ENDIF();
13390 IEM_MC_END();
13391 break;
13392
13393 case IEMMODE_64BIT:
13394 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
13395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13396 IEM_MC_IF_RCX_IS_NZ() {
13397 IEM_MC_ADVANCE_RIP_AND_FINISH();
13398 } IEM_MC_ELSE() {
13399 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13400 } IEM_MC_ENDIF();
13401 IEM_MC_END();
13402 break;
13403
13404 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13405 }
13406}
13407
13408
13409/**
13410 * @opcode 0xe4
13411 * @opfltest iopl
13412 */
13413FNIEMOP_DEF(iemOp_in_AL_Ib)
13414{
13415 IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
13416 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13418 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
13419 iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
13420}
13421
13422
13423/**
13424 * @opcode 0xe5
13425 * @opfltest iopl
13426 */
13427FNIEMOP_DEF(iemOp_in_eAX_Ib)
13428{
13429 IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
13430 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13432 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
13433 iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
13434 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
13435}
13436
13437
13438/**
13439 * @opcode 0xe6
13440 * @opfltest iopl
13441 */
13442FNIEMOP_DEF(iemOp_out_Ib_AL)
13443{
13444 IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
13445 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13447 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
13448 iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
13449}
13450
13451
13452/**
13453 * @opcode 0xe7
13454 * @opfltest iopl
13455 */
13456FNIEMOP_DEF(iemOp_out_Ib_eAX)
13457{
13458 IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
13459 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13461 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
13462 iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
13463 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
13464}
13465
13466
13467/**
13468 * @opcode 0xe8
13469 */
13470FNIEMOP_DEF(iemOp_call_Jv)
13471{
13472 IEMOP_MNEMONIC(call_Jv, "call Jv");
13473 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13474 switch (pVCpu->iem.s.enmEffOpSize)
13475 {
13476 case IEMMODE_16BIT:
13477 {
13478 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13479 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
13480 iemCImpl_call_rel_16, (int16_t)u16Imm);
13481 }
13482
13483 case IEMMODE_32BIT:
13484 {
13485 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13486 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
13487 iemCImpl_call_rel_32, (int32_t)u32Imm);
13488 }
13489
13490 case IEMMODE_64BIT:
13491 {
13492 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
13493 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
13494 iemCImpl_call_rel_64, u64Imm);
13495 }
13496
13497 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13498 }
13499}
13500
13501
13502/**
13503 * @opcode 0xe9
13504 */
13505FNIEMOP_DEF(iemOp_jmp_Jv)
13506{
13507 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
13508 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13509 switch (pVCpu->iem.s.enmEffOpSize)
13510 {
13511 case IEMMODE_16BIT:
13512 IEM_MC_BEGIN(0, 0, 0, 0);
13513 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
13514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13515 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
13516 IEM_MC_END();
13517 break;
13518
13519 case IEMMODE_64BIT:
13520 case IEMMODE_32BIT:
13521 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13522 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
13523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13524 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
13525 IEM_MC_END();
13526 break;
13527
13528 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13529 }
13530}
13531
13532
13533/**
13534 * @opcode 0xea
13535 */
13536FNIEMOP_DEF(iemOp_jmp_Ap)
13537{
13538 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
13539 IEMOP_HLP_NO_64BIT();
13540
13541 /* Decode the far pointer address and pass it on to the far call C implementation. */
13542 uint32_t off32Seg;
13543 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
13544 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
13545 else
13546 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
13547 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
13548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13549 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
13550 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
13551 iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
13552 /** @todo make task-switches, ring-switches, ++ return non-zero status */
13553}
13554
13555
13556/**
13557 * @opcode 0xeb
13558 */
13559FNIEMOP_DEF(iemOp_jmp_Jb)
13560{
13561 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
13562 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13563 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13564
13565 IEM_MC_BEGIN(0, 0, 0, 0);
13566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13567 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13568 IEM_MC_END();
13569}
13570
13571
13572/**
13573 * @opcode 0xec
13574 * @opfltest iopl
13575 */
13576FNIEMOP_DEF(iemOp_in_AL_DX)
13577{
13578 IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
13579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13580 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
13581 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
13582 iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
13583}
13584
13585
13586/**
13587 * @opcode 0xed
13588 * @opfltest iopl
13589 */
13590FNIEMOP_DEF(iemOp_in_eAX_DX)
13591{
13592 IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
13593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13594 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
13595 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
13596 iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
13597 pVCpu->iem.s.enmEffAddrMode);
13598}
13599
13600
13601/**
13602 * @opcode 0xee
13603 * @opfltest iopl
13604 */
13605FNIEMOP_DEF(iemOp_out_DX_AL)
13606{
13607 IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
13608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13609 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
13610 iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
13611}
13612
13613
13614/**
13615 * @opcode 0xef
13616 * @opfltest iopl
13617 */
13618FNIEMOP_DEF(iemOp_out_DX_eAX)
13619{
13620 IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
13621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13622 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
13623 iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
13624 pVCpu->iem.s.enmEffAddrMode);
13625}
13626
13627
13628/**
13629 * @opcode 0xf0
13630 */
13631FNIEMOP_DEF(iemOp_lock)
13632{
13633 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
13634 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
13635
13636 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
13637 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
13638}
13639
13640
13641/**
13642 * @opcode 0xf1
13643 */
13644FNIEMOP_DEF(iemOp_int1)
13645{
13646 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
13647 /** @todo Does not generate \#UD on 286, or so they say... Was allegedly a
13648 * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
13649 * LOADALL memo. Needs some testing. */
13650 IEMOP_HLP_MIN_386();
13651 /** @todo testcase! */
13652 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
13653 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
13654 iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
13655}
13656
13657
13658/**
13659 * @opcode 0xf2
13660 */
13661FNIEMOP_DEF(iemOp_repne)
13662{
13663 /* This overrides any previous REPE prefix. */
13664 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
13665 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
13666 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
13667
13668 /* For the 4 entry opcode tables, REPNZ overrides any previous
13669 REPZ and operand size prefixes. */
13670 pVCpu->iem.s.idxPrefix = 3;
13671
13672 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
13673 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
13674}
13675
13676
13677/**
13678 * @opcode 0xf3
13679 */
13680FNIEMOP_DEF(iemOp_repe)
13681{
13682 /* This overrides any previous REPNE prefix. */
13683 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
13684 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
13685 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
13686
13687 /* For the 4 entry opcode tables, REPNZ overrides any previous
13688 REPNZ and operand size prefixes. */
13689 pVCpu->iem.s.idxPrefix = 2;
13690
13691 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
13692 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
13693}
13694
13695
13696/**
13697 * @opcode 0xf4
13698 */
13699FNIEMOP_DEF(iemOp_hlt)
13700{
13701 IEMOP_MNEMONIC(hlt, "hlt");
13702 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13703 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_hlt);
13704}
13705
13706
13707/**
13708 * @opcode 0xf5
13709 * @opflmodify cf
13710 */
13711FNIEMOP_DEF(iemOp_cmc)
13712{
13713 IEMOP_MNEMONIC(cmc, "cmc");
13714 IEM_MC_BEGIN(0, 0, 0, 0);
13715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13716 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
13717 IEM_MC_ADVANCE_RIP_AND_FINISH();
13718 IEM_MC_END();
13719}
13720
13721
13722/**
13723 * Body for of 'inc/dec/not/neg Eb'.
13724 */
13725#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
13726 if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
13727 { \
13728 /* register access */ \
13729 IEM_MC_BEGIN(2, 0, 0, 0); \
13730 IEMOP_HLP_DONE_DECODING(); \
13731 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
13732 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13733 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
13734 IEM_MC_REF_EFLAGS(pEFlags); \
13735 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
13736 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13737 IEM_MC_END(); \
13738 } \
13739 else \
13740 { \
13741 /* memory access. */ \
13742 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
13743 { \
13744 IEM_MC_BEGIN(2, 2, 0, 0); \
13745 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
13746 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13747 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13748 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13749 \
13750 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
13751 IEMOP_HLP_DONE_DECODING(); \
13752 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13753 IEM_MC_FETCH_EFLAGS(EFlags); \
13754 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
13755 \
13756 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13757 IEM_MC_COMMIT_EFLAGS(EFlags); \
13758 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13759 IEM_MC_END(); \
13760 } \
13761 else \
13762 { \
13763 IEM_MC_BEGIN(2, 2, 0, 0); \
13764 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
13765 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13766 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13767 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13768 \
13769 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
13770 IEMOP_HLP_DONE_DECODING(); \
13771 IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13772 IEM_MC_FETCH_EFLAGS(EFlags); \
13773 IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
13774 \
13775 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
13776 IEM_MC_COMMIT_EFLAGS(EFlags); \
13777 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13778 IEM_MC_END(); \
13779 } \
13780 } \
13781 (void)0
13782
13783
13784/**
13785 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
13786 */
13787#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
13788 if (IEM_IS_MODRM_REG_MODE(bRm)) \
13789 { \
13790 /* \
13791 * Register target \
13792 */ \
13793 switch (pVCpu->iem.s.enmEffOpSize) \
13794 { \
13795 case IEMMODE_16BIT: \
13796 IEM_MC_BEGIN(2, 0, 0, 0); \
13797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13798 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
13799 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13800 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13801 IEM_MC_REF_EFLAGS(pEFlags); \
13802 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
13803 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13804 IEM_MC_END(); \
13805 break; \
13806 \
13807 case IEMMODE_32BIT: \
13808 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0); \
13809 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13810 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
13811 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13812 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13813 IEM_MC_REF_EFLAGS(pEFlags); \
13814 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
13815 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
13816 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13817 IEM_MC_END(); \
13818 break; \
13819 \
13820 case IEMMODE_64BIT: \
13821 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0); \
13822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13823 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
13824 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13825 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13826 IEM_MC_REF_EFLAGS(pEFlags); \
13827 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
13828 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13829 IEM_MC_END(); \
13830 break; \
13831 \
13832 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
13833 } \
13834 } \
13835 else \
13836 { \
13837 /* \
13838 * Memory target. \
13839 */ \
13840 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
13841 { \
13842 switch (pVCpu->iem.s.enmEffOpSize) \
13843 { \
13844 case IEMMODE_16BIT: \
13845 IEM_MC_BEGIN(2, 3, 0, 0); \
13846 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
13847 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13848 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13849 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13850 \
13851 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13852 IEMOP_HLP_DONE_DECODING(); \
13853 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13854 IEM_MC_FETCH_EFLAGS(EFlags); \
13855 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
13856 \
13857 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13858 IEM_MC_COMMIT_EFLAGS(EFlags); \
13859 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13860 IEM_MC_END(); \
13861 break; \
13862 \
13863 case IEMMODE_32BIT: \
13864 IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
13865 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
13866 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13867 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13868 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13869 \
13870 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13871 IEMOP_HLP_DONE_DECODING(); \
13872 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13873 IEM_MC_FETCH_EFLAGS(EFlags); \
13874 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
13875 \
13876 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13877 IEM_MC_COMMIT_EFLAGS(EFlags); \
13878 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13879 IEM_MC_END(); \
13880 break; \
13881 \
13882 case IEMMODE_64BIT: \
13883 IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
13884 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
13885 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13886 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13887 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13888 \
13889 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13890 IEMOP_HLP_DONE_DECODING(); \
13891 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13892 IEM_MC_FETCH_EFLAGS(EFlags); \
13893 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
13894 \
13895 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13896 IEM_MC_COMMIT_EFLAGS(EFlags); \
13897 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13898 IEM_MC_END(); \
13899 break; \
13900 \
13901 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
13902 } \
13903 } \
13904 else \
13905 { \
13906 (void)0
13907
/**
 * LOCK-prefixed memory continuation of IEMOP_BODY_UNARY_Ev: atomic mapping
 * and the locked assembly workers.  Closes the braces opened by the
 * preceding macro, so the two must always be used as a pair.
 *
 * @param   a_fnLockedU16/32/64     Assembly workers for the locked case.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
13973
13974
13975/**
13976 * @opmaps grp3_f6
13977 * @opcode /0
13978 * @opflclass logical
13979 * @todo also /1
13980 */
13981FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
13982{
13983 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
13984 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
13985
13986 if (IEM_IS_MODRM_REG_MODE(bRm))
13987 {
13988 /* register access */
13989 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13990 IEM_MC_BEGIN(3, 0, 0, 0);
13991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13992 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13993 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
13994 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13995 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
13996 IEM_MC_REF_EFLAGS(pEFlags);
13997 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
13998 IEM_MC_ADVANCE_RIP_AND_FINISH();
13999 IEM_MC_END();
14000 }
14001 else
14002 {
14003 /* memory access. */
14004 IEM_MC_BEGIN(3, 3, 0, 0);
14005 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14006 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
14007
14008 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
14009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14010
14011 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
14012 IEM_MC_ARG(uint8_t const *, pu8Dst, 0);
14013 IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14014
14015 IEM_MC_ARG_CONST(uint8_t, u8Src, u8Imm, 1);
14016 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
14017 IEM_MC_FETCH_EFLAGS(EFlags);
14018 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
14019
14020 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
14021 IEM_MC_COMMIT_EFLAGS(EFlags);
14022 IEM_MC_ADVANCE_RIP_AND_FINISH();
14023 IEM_MC_END();
14024 }
14025}
14026
14027
/* Body for opcode 0xf6 variations /4, /5, /6 and /7 (mul/imul/div/idiv Eb):
   AX (op) r/m8.  The worker returns 0 on success, non-zero to raise \#DE
   (divide error / overflow). */
#define IEMOP_GRP3_MUL_DIV_EB(bRm, a_pfnU8Expr) \
    PFNIEMAIMPLMULDIVU8 const pfnU8 = (a_pfnU8Expr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register access */ \
        IEM_MC_BEGIN(3, 1, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint16_t *, pu16AX, 0); \
        IEM_MC_ARG(uint8_t, u8Value, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(int32_t, rc); \
        \
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags); \
        IEM_MC_IF_LOCAL_IS_Z(rc) { \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        } IEM_MC_ELSE() { \
            IEM_MC_RAISE_DIVIDE_ERROR(); \
        } IEM_MC_ENDIF(); \
        \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory access. */ \
        IEM_MC_BEGIN(3, 2, 0, 0); \
        IEM_MC_ARG(uint16_t *, pu16AX, 0); \
        IEM_MC_ARG(uint8_t, u8Value, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_LOCAL(int32_t, rc); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags); \
        IEM_MC_IF_LOCAL_IS_Z(rc) { \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        } IEM_MC_ELSE() { \
            IEM_MC_RAISE_DIVIDE_ERROR(); \
        } IEM_MC_ENDIF(); \
        \
        IEM_MC_END(); \
    } (void)0
14077
14078
/* Body for opcode 0xf7 variant /4, /5, /6 and /7 (mul/imul/div/idiv Ev):
   DX:AX / EDX:EAX / RDX:RAX (op) r/m16/32/64.  The size-dispatched workers
   return 0 on success, non-zero to raise \#DE.  On 32-bit success the high
   dwords of RAX/RDX are cleared explicitly. */
#define IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, a_pImplExpr) \
    PCIEMOPMULDIVSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register access */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(4, 1, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16AX, 0); \
                IEM_MC_ARG(uint16_t *, pu16DX, 1); \
                IEM_MC_ARG(uint16_t, u16Value, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                IEM_MC_LOCAL(int32_t, rc); \
                \
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(4, 1, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32AX, 0); \
                IEM_MC_ARG(uint32_t *, pu32DX, 1); \
                IEM_MC_ARG(uint32_t, u32Value, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                IEM_MC_LOCAL(int32_t, rc); \
                \
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(4, 1, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64AX, 0); \
                IEM_MC_ARG(uint64_t *, pu64DX, 1); \
                IEM_MC_ARG(uint64_t, u64Value, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                IEM_MC_LOCAL(int32_t, rc); \
                \
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory access. */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(4, 2, 0, 0); \
                IEM_MC_ARG(uint16_t *, pu16AX, 0); \
                IEM_MC_ARG(uint16_t *, pu16DX, 1); \
                IEM_MC_ARG(uint16_t, u16Value, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(int32_t, rc); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(4, 2, IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *, pu32AX, 0); \
                IEM_MC_ARG(uint32_t *, pu32DX, 1); \
                IEM_MC_ARG(uint32_t, u32Value, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(int32_t, rc); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(4, 2, IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *, pu64AX, 0); \
                IEM_MC_ARG(uint64_t *, pu64DX, 1); \
                IEM_MC_ARG(uint64_t, u64Value, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(int32_t, rc); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
14246
14247
14248/**
14249 * @opmaps grp3_f6
14250 * @opcode /2
14251 * @opflclass unchanged
14252 */
FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
{
/** @todo does not modify EFLAGS. */
    IEMOP_MNEMONIC(not_Eb, "not Eb");
    /* One's complement of the byte operand (F6 /2); the shared unary body
       handles both register and memory forms, with a locked variant for
       LOCK-prefixed memory access. */
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
}
14259
14260
14261/**
14262 * @opmaps grp3_f6
14263 * @opcode /3
14264 * @opflclass arithmetic
14265 */
14266FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
14267{
14268 IEMOP_MNEMONIC(net_Eb, "neg Eb");
14269 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
14270}
14271
14272
14273/**
14274 * @opcode 0xf6
14275 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    /* Group 3, byte operand (0xf6): dispatch on the ModR/M reg field. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1: /* /1 is decoded identically to /0 (TEST) here. */
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 2:
            return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
        case 4:
        {
            /**
             * @opdone
             * @opmaps grp3_f6
             * @opcode /4
             * @opflclass multiply
             */
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            /* SF/ZF/AF/PF are undefined after MUL; don't compare them in verification. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
            break;
        }
        case 5:
        {
            /**
             * @opdone
             * @opmaps grp3_f6
             * @opcode /5
             * @opflclass multiply
             */
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
            break;
        }
        case 6:
        {
            /**
             * @opdone
             * @opmaps grp3_f6
             * @opcode /6
             * @opflclass division
             */
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            /* All arithmetic flags are undefined after DIV/IDIV. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
            break;
        }
        case 7:
        {
            /**
             * @opdone
             * @opmaps grp3_f6
             * @opcode /7
             * @opflclass division
             */
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
            break;
        }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14344
14345
14346/**
14347 * @opmaps grp3_f7
14348 * @opcode /0
14349 * @opflclass logical
14350 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    /* AF is left undefined by TEST; exclude it from verification compares. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16Dst,                 0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,      1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32Dst,                 0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,      1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                /* 64-bit form takes a sign-extended 32-bit immediate. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64Dst,                 0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,      1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* The 2nd parameter is the number of immediate bytes still to be
                   fetched, needed for RIP-relative addressing. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* TEST only reads the destination, so map it read-only. */
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint16_t const *, pu16Dst, 0);
                IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint16_t,  u16Src, u16Imm,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint32_t const *, pu32Dst, 0);
                IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint32_t,  u32Src, u32Imm,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Still 4 immediate bytes: the 64-bit form uses a sign-extended imm32. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);

                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_ARG(uint64_t const *, pu64Dst, 0);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint64_t,  u64Src, u64Imm,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14485
14486
14487/**
14488 * @opmaps grp3_f7
14489 * @opcode /2
14490 * @opflclass unchanged
14491 */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
/** @todo does not modify EFLAGS */
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    /* One's complement of Ev (F7 /2): unlocked body first, then the
       locked variants used for LOCK-prefixed memory operands. */
    IEMOP_BODY_UNARY_Ev(       iemAImpl_not_u16,        iemAImpl_not_u32,        iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
14499
14500
14501/**
14502 * @opmaps grp3_f7
14503 * @opcode /3
14504 * @opflclass arithmetic
14505 */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    /* Two's complement negation of Ev (F7 /3); locked variants cover
       LOCK-prefixed memory operands. */
    IEMOP_BODY_UNARY_Ev(       iemAImpl_neg_u16,        iemAImpl_neg_u32,        iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
14512
14513
14514/**
14515 * @opmaps grp3_f7
14516 * @opcode /4
14517 * @opflclass multiply
14518 */
FNIEMOP_DEF_1(iemOp_grp3_mul_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(mul_Ev, "mul Ev");
    /* SF/ZF/AF/PF are undefined after MUL; exclude from verification compares. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
}
14525
14526
14527/**
14528 * @opmaps grp3_f7
14529 * @opcode /5
14530 * @opflclass multiply
14531 */
FNIEMOP_DEF_1(iemOp_grp3_imul_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(imul_Ev, "imul Ev");
    /* SF/ZF/AF/PF are undefined after IMUL; exclude from verification compares. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
}
14538
14539
14540/**
14541 * @opmaps grp3_f7
14542 * @opcode /6
14543 * @opflclass division
14544 */
FNIEMOP_DEF_1(iemOp_grp3_div_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(div_Ev, "div Ev");
    /* All arithmetic flags are undefined after DIV. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
    IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
}
14551
14552
14553/**
14554 * @opmaps grp3_f7
14555 * @opcode /7
14556 * @opflclass division
14557 */
FNIEMOP_DEF_1(iemOp_grp3_idiv_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
    /* All arithmetic flags are undefined after IDIV. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
    IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
}
14564
14565
14566/**
14567 * @opcode 0xf7
14568 */
14569FNIEMOP_DEF(iemOp_Grp3_Ev)
14570{
14571 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14572 switch (IEM_GET_MODRM_REG_8(bRm))
14573 {
14574 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
14575 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
14576 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
14577 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
14578 case 4: return FNIEMOP_CALL_1(iemOp_grp3_mul_Ev, bRm);
14579 case 5: return FNIEMOP_CALL_1(iemOp_grp3_imul_Ev, bRm);
14580 case 6: return FNIEMOP_CALL_1(iemOp_grp3_div_Ev, bRm);
14581 case 7: return FNIEMOP_CALL_1(iemOp_grp3_idiv_Ev, bRm);
14582 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14583 }
14584}
14585
14586
14587/**
14588 * @opcode 0xf8
14589 * @opflmodify cf
14590 * @opflclear cf
14591 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    /* Clears CF in EFLAGS; no other flags are touched. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
14601
14602
14603/**
14604 * @opcode 0xf9
14605 * @opflmodify cf
14606 * @opflset cf
14607 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    /* Sets CF in EFLAGS; no other flags are touched. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
14617
14618
14619/**
14620 * @opcode 0xfa
14621 * @opfltest iopl,vm
14622 * @opflmodify if,vif
14623 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to a C implementation: touches RFLAGS, can cause VM-exits,
       and IRQs must be checked before the flag change takes effect. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, 0, iemCImpl_cli);
}
14630
14631
14632/**
14633 * @opcode 0xfb
14634 * @opfltest iopl,vm
14635 * @opflmodify if,vif
14636 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to a C implementation: touches RFLAGS, checks IRQs after the
       instruction, and sets up the interrupt-inhibit shadow for the
       following instruction. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(  IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER
                                | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_INHIBIT_SHADOW, 0, iemCImpl_sti);
}
14644
14645
14646/**
14647 * @opcode 0xfc
14648 * @opflmodify df
14649 * @opflclear df
14650 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    /* Clears DF in EFLAGS (string operations go upwards). */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
14660
14661
14662/**
14663 * @opcode 0xfd
14664 * @opflmodify df
14665 * @opflset df
14666 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    /* Sets DF in EFLAGS (string operations go downwards). */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
14676
14677
14678/**
14679 * @opmaps grp4
14680 * @opcode /0
14681 * @opflclass incdec
14682 */
FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Eb, "inc Eb");
    /* Byte increment (FE /0); shared unary body, locked variant for LOCK-prefixed memory forms. */
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
}
14688
14689
14690/**
14691 * @opmaps grp4
14692 * @opcode /1
14693 * @opflclass incdec
14694 */
FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Eb, "dec Eb");
    /* Byte decrement (FE /1); shared unary body, locked variant for LOCK-prefixed memory forms. */
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
}
14700
14701
14702/**
14703 * @opcode 0xfe
14704 */
14705FNIEMOP_DEF(iemOp_Grp4)
14706{
14707 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14708 switch (IEM_GET_MODRM_REG_8(bRm))
14709 {
14710 case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
14711 case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
14712 default:
14713 /** @todo is the eff-addr decoded? */
14714 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
14715 IEMOP_RAISE_INVALID_OPCODE_RET();
14716 }
14717}
14718
14719/**
14720 * @opmaps grp5
14721 * @opcode /0
14722 * @opflclass incdec
14723 */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    /* Word/dword/qword increment (FF /0); locked variants for LOCK-prefixed memory forms. */
    IEMOP_BODY_UNARY_Ev(       iemAImpl_inc_u16,        iemAImpl_inc_u32,        iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
14730
14731
14732/**
14733 * @opmaps grp5
14734 * @opcode /1
14735 * @opflclass incdec
14736 */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    /* Word/dword/qword decrement (FF /1); locked variants for LOCK-prefixed memory forms. */
    IEMOP_BODY_UNARY_Ev(       iemAImpl_dec_u16,        iemAImpl_dec_u32,        iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
14743
14744
14745/**
14746 * Opcode 0xff /2.
14747 * @param bRm The RM byte.
14748 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is read from memory at the effective address.
           (Comment previously said "register" - copy/paste slip.) */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1, 0, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14831
/**
 * Common body for far indirect call/jmp via a memory far pointer (FF /3 and
 * FF /5).  Register operands raise \#UD; the sel:offset pair is loaded from
 * memory (offset first, selector at offset+size).
 *
 * @param   a_bRm           The ModR/M byte; must encode a memory operand.
 * @param   a_fnCImpl       The C implementation worker (e.g. iemCImpl_callf,
 *                          iemCImpl_FarJmp).
 * @param   a_fCImplExtra   Additional IEM_CIMPL_F_XXX flags to OR in (e.g.
 *                          IEM_CIMPL_F_BRANCH_STACK for callf).
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl, a_fCImplExtra) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1, 0, 0); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint16_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_16BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint32_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_32BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint64_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_64BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE /* no gates */, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
14900
14901
14902/**
14903 * Opcode 0xff /3.
14904 * @param bRm The RM byte.
14905 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    /* Far call via memory far pointer; pushes the return address, hence IEM_CIMPL_F_BRANCH_STACK. */
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf, IEM_CIMPL_F_BRANCH_STACK);
}
14911
14912
14913/**
14914 * Opcode 0xff /4.
14915 * @param bRm The RM byte.
14916 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14999
15000
15001/**
15002 * Opcode 0xff /5.
15003 * @param bRm The RM byte.
15004 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    /* Far jump via memory far pointer; no stack push, so no extra CIMPL flags. */
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp, 0);
}
15010
15011
15012/**
15013 * Opcode 0xff /6.
15014 * @param bRm The RM byte.
15015 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here: fetch the operand, then push it. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(uint16_t,  u16Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            /* 32-bit pushes don't exist in 64-bit mode (default op size is
               64-bit there), hence IEM_MC_F_NOT_64BIT. */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEM_MC_LOCAL(uint32_t,  u32Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t,  u64Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15067
15068
15069/**
15070 * @opcode 0xff
15071 */
15072FNIEMOP_DEF(iemOp_Grp5)
15073{
15074 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15075 switch (IEM_GET_MODRM_REG_8(bRm))
15076 {
15077 case 0:
15078 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
15079 case 1:
15080 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
15081 case 2:
15082 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
15083 case 3:
15084 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
15085 case 4:
15086 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
15087 case 5:
15088 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
15089 case 6:
15090 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
15091 case 7:
15092 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
15093 IEMOP_RAISE_INVALID_OPCODE_RET();
15094 }
15095 AssertFailedReturn(VERR_IEM_IPE_3);
15096}
15097
15098
15099
/**
 * The one-byte opcode dispatch table, indexed directly by the opcode byte.
 * Declared extern near the top of this file so it can be forward referenced.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex3,  iemOp_lds_Gv_Mp__vex2,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_in_eAX_DX,        iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
15167
15168
15169/** @} */
15170
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette